Python: Naming updates #12233

Merged
Changes from 5 commits
2 changes: 1 addition & 1 deletion python/pyproject.toml
@@ -118,7 +118,7 @@ ollama = [
]
onnx = [
"onnxruntime-genai ~= 0.5; python_version < '3.13' and platform_system != 'Windows'",
"onnxruntime == 1.22.0; platform_system == 'Windows'"
"onnxruntime == 1.22.0"
]
pandas = [
"pandas ~= 2.2"
18 changes: 9 additions & 9 deletions python/samples/concepts/caching/semantic_caching.py
@@ -13,10 +13,10 @@
from semantic_kernel.data import (
VectorSearchOptions,
VectorStore,
VectorStoreDataField,
VectorStoreKeyField,
VectorStoreRecordCollection,
VectorStoreRecordDataField,
VectorStoreRecordKeyField,
VectorStoreRecordVectorField,
VectorStoreVectorField,
vectorstoremodel,
)
from semantic_kernel.filters import FilterTypes, FunctionInvocationContext, PromptRenderContext
@@ -32,9 +32,9 @@
@vectorstoremodel(collection_name=COLLECTION_NAME)
@dataclass
class CacheRecord:
result: Annotated[str, VectorStoreRecordDataField(is_full_text_indexed=True)]
prompt: Annotated[str | None, VectorStoreRecordVectorField(dimensions=1536)] = None
id: Annotated[str, VectorStoreRecordKeyField] = field(default_factory=lambda: str(uuid4()))
result: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
prompt: Annotated[str | None, VectorStoreVectorField(dimensions=1536)] = None
id: Annotated[str, VectorStoreKeyField] = field(default_factory=lambda: str(uuid4()))


# Define the filters, one for caching the results and one for using the cache.
@@ -50,7 +50,7 @@ def __init__(
raise ValueError("The vector store must have an embedding generator.")
self.vector_store = vector_store
self.collection: VectorStoreRecordCollection[str, CacheRecord] = vector_store.get_collection(
data_model_type=CacheRecord
record_type=CacheRecord
)
self.score_threshold = score_threshold

@@ -65,7 +65,7 @@ async def on_prompt_render(
closer the match.
"""
await next(context)
await self.collection.create_collection_if_not_exists()
await self.collection.ensure_collection_exists()
results = await self.collection.search(
context.rendered_prompt, options=VectorSearchOptions(vector_property_name="prompt", top=1)
)
@@ -86,7 +86,7 @@ async def on_function_invocation(
result = context.result
if result and result.rendered_prompt and RECORD_ID_KEY not in result.metadata:
cache_record = CacheRecord(prompt=result.rendered_prompt, result=str(result))
await self.collection.create_collection_if_not_exists()
await self.collection.ensure_collection_exists()
await self.collection.upsert(cache_record)


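Taken together, the changes in this caching sample follow one pattern: the `VectorStoreRecord*Field` annotations drop the `Record` infix, `get_collection` takes `record_type` instead of `data_model_type`, and `create_collection_if_not_exists` becomes `ensure_collection_exists`. Below is a minimal sketch of the renamed surface, not part of the change itself; the `InMemoryCollection` connector and `ensure_collection_deleted` are taken from the later samples in this diff, and embedding generation is left out:

```python
from dataclasses import dataclass, field
from typing import Annotated
from uuid import uuid4

from semantic_kernel.connectors.memory import InMemoryCollection
from semantic_kernel.data import VectorStoreDataField, VectorStoreKeyField, VectorStoreVectorField, vectorstoremodel


@vectorstoremodel(collection_name="cache")
@dataclass
class CacheRecord:
    result: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
    prompt: Annotated[str | None, VectorStoreVectorField(dimensions=1536)] = None
    id: Annotated[str, VectorStoreKeyField] = field(default_factory=lambda: str(uuid4()))


async def demo() -> None:
    # record_type replaces data_model_type on collection constructors and get_collection().
    collection = InMemoryCollection[str, CacheRecord](record_type=CacheRecord)
    await collection.ensure_collection_exists()   # was create_collection_if_not_exists
    await collection.ensure_collection_deleted()  # was delete_collection
```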
@@ -7,15 +7,15 @@
from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings
from semantic_kernel import Kernel
from semantic_kernel.connectors.ai import FunctionChoiceBehavior
from semantic_kernel.connectors.memory.azure_cosmos_db import AzureCosmosDBNoSQLStore
from semantic_kernel.connectors.memory.azure_cosmos_db import CosmosNoSqlStore
from semantic_kernel.contents import ChatHistory, ChatMessageContent
from semantic_kernel.core_plugins.math_plugin import MathPlugin
from semantic_kernel.core_plugins.time_plugin import TimePlugin
from semantic_kernel.data import (
VectorStore,
VectorStoreDataField,
VectorStoreKeyField,
VectorStoreRecordCollection,
VectorStoreRecordDataField,
VectorStoreRecordKeyField,
vectorstoremodel,
)

@@ -39,9 +39,9 @@
@vectorstoremodel
@dataclass
class ChatHistoryModel:
session_id: Annotated[str, VectorStoreRecordKeyField]
user_id: Annotated[str, VectorStoreRecordDataField(is_indexed=True)]
messages: Annotated[list[dict[str, str]], VectorStoreRecordDataField(is_indexed=True)]
session_id: Annotated[str, VectorStoreKeyField]
user_id: Annotated[str, VectorStoreDataField(is_indexed=True)]
messages: Annotated[list[dict[str, str]], VectorStoreDataField(is_indexed=True)]


# 2. We then create a class that extends the ChatHistory class
@@ -64,9 +64,9 @@ async def create_collection(self, collection_name: str) -> None:
"""
self.collection = self.store.get_collection(
collection_name=collection_name,
data_model_type=ChatHistoryModel,
record_type=ChatHistoryModel,
)
await self.collection.create_collection_if_not_exists()
await self.collection.ensure_collection_exists()

async def store_messages(self) -> None:
"""Store the chat history in the Cosmos DB.
@@ -175,7 +175,7 @@ async def main() -> None:

# First we enter the store context manager to connect.
# The create_database flag will create the database if it does not exist.
async with AzureCosmosDBNoSQLStore(create_database=True) as store:
async with CosmosNoSqlStore(create_database=True) as store:
# Then we create the chat history in CosmosDB.
history = ChatHistoryInCosmosDB(store=store, session_id=session_id, user_id="user")
# Finally we create the collection.
@@ -191,7 +191,7 @@ async def main() -> None:
except Exception:
print("Closing chat...")
if delete_when_done and history.collection:
await history.collection.delete_collection()
await history.collection.ensure_collection_deleted()


if __name__ == "__main__":
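The same renames show up at the store level in this chat-history sample: `AzureCosmosDBNoSQLStore` is now `CosmosNoSqlStore`, `get_collection` takes `record_type`, and the collection lifecycle methods are `ensure_collection_exists` / `ensure_collection_deleted`. A condensed sketch of that flow follows, with a hypothetical collection name and a trimmed-down model; the Cosmos DB connection settings are assumed to come from the environment, as in the sample:

```python
from dataclasses import dataclass
from typing import Annotated

from semantic_kernel.connectors.memory.azure_cosmos_db import CosmosNoSqlStore
from semantic_kernel.data import VectorStoreDataField, VectorStoreKeyField, vectorstoremodel


@vectorstoremodel
@dataclass
class ChatSession:
    session_id: Annotated[str, VectorStoreKeyField]
    user_id: Annotated[str, VectorStoreDataField(is_indexed=True)]


async def manage_history() -> None:
    # create_database provisions the database on first use, as the sample comments note.
    async with CosmosNoSqlStore(create_database=True) as store:
        collection = store.get_collection(collection_name="chat_history", record_type=ChatSession)
        await collection.ensure_collection_exists()   # was create_collection_if_not_exists
        # ... upsert / get records here ...
        await collection.ensure_collection_deleted()  # was delete_collection
```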
@@ -25,7 +25,7 @@ async def main(query: str):
records = load_records()
# Create the Azure AI Search collection
async with AzureAISearchCollection[str, HotelSampleClass](
data_model_type=HotelSampleClass, embedding_generator=OpenAITextEmbedding()
record_type=HotelSampleClass, embedding_generator=OpenAITextEmbedding()
) as collection:
# Check if the collection exists.
if not await collection.does_collection_exist():
@@ -64,7 +64,7 @@ async def main(query: str):
f"{result.record.Address.Country}): {result.record.Description} (score: {result.score})"
)

await collection.delete_collection()
await collection.ensure_collection_deleted()


if __name__ == "__main__":
@@ -35,7 +35,7 @@

# Create an Azure AI Search collection.
collection = AzureAISearchCollection[str, HotelSampleClass](
data_model_type=HotelSampleClass, embedding_generator=OpenAITextEmbedding()
record_type=HotelSampleClass, embedding_generator=OpenAITextEmbedding()
)
# load the records
records = load_records()
@@ -210,7 +210,7 @@ async def chat():

delete_collection = input("Do you want to delete the collection? (y/n): ")
if delete_collection.lower() == "y":
await collection.delete_collection()
await collection.ensure_collection_deleted()
print("Collection deleted.")
else:
print("Collection not deleted.")
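For the Azure AI Search samples the only constructor change is `record_type`, while cleanup moves to `ensure_collection_deleted`. Here is a sketch with a hypothetical trimmed-down hotel model; the `search(...)` call with `VectorSearchOptions` mirrors the caching sample earlier in this diff, and the `results.results` iteration is assumed to match the full samples:

```python
from dataclasses import dataclass, field
from typing import Annotated
from uuid import uuid4

from semantic_kernel.connectors.ai.open_ai import OpenAITextEmbedding
from semantic_kernel.connectors.memory import AzureAISearchCollection
from semantic_kernel.data import (
    VectorSearchOptions,
    VectorStoreDataField,
    VectorStoreKeyField,
    VectorStoreVectorField,
    vectorstoremodel,
)


@vectorstoremodel(collection_name="hotels-demo")
@dataclass
class HotelSummary:
    description: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
    description_vector: Annotated[list[float] | str | None, VectorStoreVectorField(dimensions=1536)] = None
    hotel_id: Annotated[str, VectorStoreKeyField] = field(default_factory=lambda: str(uuid4()))


async def search_hotels(query: str) -> None:
    async with AzureAISearchCollection[str, HotelSummary](
        record_type=HotelSummary, embedding_generator=OpenAITextEmbedding()
    ) as collection:
        results = await collection.search(
            query, options=VectorSearchOptions(vector_property_name="description_vector", top=3)
        )
        async for result in results.results:
            print(result.record.description, result.score)
        await collection.ensure_collection_deleted()  # was delete_collection
```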
@@ -15,12 +15,7 @@
)
from pydantic import BaseModel, ConfigDict

from semantic_kernel.data import (
VectorStoreRecordDataField,
VectorStoreRecordKeyField,
VectorStoreRecordVectorField,
vectorstoremodel,
)
from semantic_kernel.data import VectorStoreDataField, VectorStoreKeyField, VectorStoreVectorField, vectorstoremodel

"""
The data model used for this sample is based on the hotel data model from the Azure AI Search samples.
@@ -60,31 +55,29 @@ class Address(BaseModel):

@vectorstoremodel(collection_name="hotel-index")
class HotelSampleClass(BaseModel):
HotelId: Annotated[str, VectorStoreRecordKeyField]
HotelName: Annotated[str | None, VectorStoreRecordDataField()] = None
HotelId: Annotated[str, VectorStoreKeyField]
HotelName: Annotated[str | None, VectorStoreDataField()] = None
Description: Annotated[
str,
VectorStoreRecordDataField(is_full_text_indexed=True),
VectorStoreDataField(is_full_text_indexed=True),
]
DescriptionVector: Annotated[
list[float] | str | None,
VectorStoreRecordVectorField(dimensions=1536),
VectorStoreVectorField(dimensions=1536),
] = None
Description_fr: Annotated[str, VectorStoreRecordDataField(is_full_text_indexed=True)]
Description_fr: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
DescriptionFrVector: Annotated[
list[float] | str | None,
VectorStoreRecordVectorField(dimensions=1536),
] = None
Category: Annotated[str, VectorStoreRecordDataField()]
Tags: Annotated[list[str], VectorStoreRecordDataField(is_indexed=True)]
ParkingIncluded: Annotated[bool | None, VectorStoreRecordDataField()] = None
LastRenovationDate: Annotated[
str | None, VectorStoreRecordDataField(property_type=SearchFieldDataType.DateTimeOffset)
VectorStoreVectorField(dimensions=1536),
] = None
Rating: Annotated[float, VectorStoreRecordDataField()]
Location: Annotated[dict[str, Any], VectorStoreRecordDataField(property_type=SearchFieldDataType.GeographyPoint)]
Address: Annotated[Address, VectorStoreRecordDataField()]
Rooms: Annotated[list[Rooms], VectorStoreRecordDataField()]
Category: Annotated[str, VectorStoreDataField()]
Tags: Annotated[list[str], VectorStoreDataField(is_indexed=True)]
ParkingIncluded: Annotated[bool | None, VectorStoreDataField()] = None
LastRenovationDate: Annotated[str | None, VectorStoreDataField(type=SearchFieldDataType.DateTimeOffset)] = None
Rating: Annotated[float, VectorStoreDataField()]
Location: Annotated[dict[str, Any], VectorStoreDataField(type=SearchFieldDataType.GeographyPoint)]
Address: Annotated[Address, VectorStoreDataField()]
Rooms: Annotated[list[Rooms], VectorStoreDataField()]

model_config = ConfigDict(extra="ignore")

55 changes: 27 additions & 28 deletions python/samples/concepts/memory/complex_memory.py
@@ -13,9 +13,9 @@
from semantic_kernel.connectors.ai.open_ai import AzureTextEmbedding, OpenAITextEmbedding
from semantic_kernel.connectors.memory import (
AzureAISearchCollection,
AzureCosmosDBforMongoDBCollection,
AzureCosmosDBNoSQLCollection,
ChromaCollection,
CosmosMongoCollection,
CosmosNoSqlCollection,
FaissCollection,
InMemoryCollection,
PineconeCollection,
@@ -27,14 +27,13 @@
WeaviateCollection,
)
from semantic_kernel.data import (
VectorSearch,
VectorStoreDataField,
VectorStoreKeyField,
VectorStoreRecordCollection,
VectorStoreRecordDataField,
VectorStoreRecordKeyField,
VectorStoreRecordVectorField,
VectorStoreVectorField,
vectorstoremodel,
)
from semantic_kernel.data.vector_search import SearchType
from semantic_kernel.data.vectors import SearchType, VectorSearch

# This is a rather complex sample, showing how to use the vector store
# with a number of different collections.
@@ -49,14 +48,14 @@
@vectorstoremodel(collection_name="test")
@dataclass
class DataModel:
title: Annotated[str, VectorStoreRecordDataField(is_full_text_indexed=True)]
content: Annotated[str, VectorStoreRecordDataField(is_full_text_indexed=True)]
title: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
content: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
embedding: Annotated[
str | None,
VectorStoreRecordVectorField(dimensions=1536, property_type="float"),
VectorStoreVectorField(dimensions=1536, type_="float"),
] = None
id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4()))
tag: Annotated[str | None, VectorStoreRecordDataField(property_type="str", is_indexed=True)] = None
id: Annotated[str, VectorStoreKeyField()] = field(default_factory=lambda: str(uuid4()))
tag: Annotated[str | None, VectorStoreDataField(type_="str", is_indexed=True)] = None

def __post_init__(self, **kwargs):
if self.embedding is None:
@@ -96,32 +95,32 @@ def __post_init__(self, **kwargs):
# Using a function allows for lazy initialization of the collection,
# so that settings for unused collections do not cause validation errors.
collections: dict[str, Callable[[], VectorStoreRecordCollection]] = {
"ai_search": lambda: AzureAISearchCollection[str, DataModel](data_model_type=DataModel),
"postgres": lambda: PostgresCollection[str, DataModel](data_model_type=DataModel),
"ai_search": lambda: AzureAISearchCollection[str, DataModel](record_type=DataModel),
"postgres": lambda: PostgresCollection[str, DataModel](record_type=DataModel),
"redis_json": lambda: RedisJsonCollection[str, DataModel](
data_model_type=DataModel,
record_type=DataModel,
prefix_collection_name_to_key_names=True,
),
"redis_hash": lambda: RedisHashsetCollection[str, DataModel](
data_model_type=DataModel,
record_type=DataModel,
prefix_collection_name_to_key_names=True,
),
"qdrant": lambda: QdrantCollection[str, DataModel](
data_model_type=DataModel,
record_type=DataModel,
prefer_grpc=True,
named_vectors=False,
),
"in_memory": lambda: InMemoryCollection[str, DataModel](data_model_type=DataModel),
"weaviate": lambda: WeaviateCollection[str, DataModel](data_model_type=DataModel),
"azure_cosmos_nosql": lambda: AzureCosmosDBNoSQLCollection[str, DataModel](
data_model_type=DataModel,
"in_memory": lambda: InMemoryCollection[str, DataModel](record_type=DataModel),
"weaviate": lambda: WeaviateCollection[str, DataModel](record_type=DataModel),
"azure_cosmos_nosql": lambda: CosmosNoSqlCollection[str, DataModel](
record_type=DataModel,
create_database=True,
),
"azure_cosmos_mongodb": lambda: AzureCosmosDBforMongoDBCollection[str, DataModel](data_model_type=DataModel),
"faiss": lambda: FaissCollection[str, DataModel](data_model_type=DataModel),
"chroma": lambda: ChromaCollection[str, DataModel](data_model_type=DataModel),
"pinecone": lambda: PineconeCollection[str, DataModel](data_model_type=DataModel),
"sql_server": lambda: SqlServerCollection[str, DataModel](data_model_type=DataModel),
"azure_cosmos_mongodb": lambda: CosmosMongoCollection[str, DataModel](record_type=DataModel),
"faiss": lambda: FaissCollection[str, DataModel](record_type=DataModel),
"chroma": lambda: ChromaCollection[str, DataModel](record_type=DataModel),
"pinecone": lambda: PineconeCollection[str, DataModel](record_type=DataModel),
"sql_server": lambda: SqlServerCollection[str, DataModel](record_type=DataModel),
}


@@ -132,7 +131,7 @@ async def cleanup(record_collection):
print("Skipping deletion.")
return
print_with_color("Deleting collection!", Colors.CBLUE)
await record_collection.delete_collection()
await record_collection.ensure_collection_deleted()
print_with_color("Done!", Colors.CGREY)


@@ -147,7 +146,7 @@ async def main(collection: str, use_azure_openai: bool):
record_collection.embedding_generator = embedder
print_with_color(f"Creating {collection} collection!", Colors.CGREY)
# cleanup any existing collection
await record_collection.delete_collection()
await record_collection.ensure_collection_deleted()
# create a new collection
await record_collection.create_collection()

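Two smaller points from complex_memory.py: `SearchType` and `VectorSearch` now come from `semantic_kernel.data.vectors`, and the dict of collection factories keeps the lazy-initialization pattern the sample comments describe, so settings for unused connectors are never validated. A small, hypothetical version of that pattern, exercising only the in-memory connector (the Faiss entry is listed but never constructed):

```python
import asyncio
from collections.abc import Callable
from dataclasses import dataclass, field
from typing import Annotated
from uuid import uuid4

from semantic_kernel.connectors.memory import FaissCollection, InMemoryCollection
from semantic_kernel.data import VectorStoreDataField, VectorStoreKeyField, VectorStoreRecordCollection, vectorstoremodel


@vectorstoremodel(collection_name="demo")
@dataclass
class DataModel:
    content: Annotated[str, VectorStoreDataField(is_full_text_indexed=True)]
    id: Annotated[str, VectorStoreKeyField()] = field(default_factory=lambda: str(uuid4()))


# Factories delay construction, so settings for unused connectors are never validated.
collections: dict[str, Callable[[], VectorStoreRecordCollection]] = {
    "in_memory": lambda: InMemoryCollection[str, DataModel](record_type=DataModel),
    "faiss": lambda: FaissCollection[str, DataModel](record_type=DataModel),
}


async def main(name: str) -> None:
    record_collection = collections[name]()
    await record_collection.ensure_collection_deleted()  # start from a clean slate (was delete_collection)
    await record_collection.create_collection()
    await record_collection.upsert(DataModel(content="hello world"))
    await record_collection.ensure_collection_deleted()


if __name__ == "__main__":
    asyncio.run(main("in_memory"))
```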