From 893ee254efb374846748b4bd3c61e46281e82c6f Mon Sep 17 00:00:00 2001 From: himanshu_gupta Date: Thu, 24 Apr 2025 08:30:02 +0530 Subject: [PATCH 1/5] Developed an end-to-end catalog ingestion flow from Petpooja(POS) to Meta and added corresponding test cases --- email.yaml | 1 + kairon/api/app/main.py | 3 +- kairon/api/app/routers/bot/data.py | 38 +- kairon/api/app/routers/bot/integrations.py | 85 + kairon/catalog_sync/definitions/base.py | 18 + kairon/catalog_sync/definitions/factory.py | 29 + kairon/events/definitions/catalog_sync.py | 73 + kairon/events/definitions/factory.py | 4 +- kairon/events/definitions/petpooja_sync.py | 197 ++ kairon/meta/processor.py | 122 + kairon/shared/account/data_objects.py | 1 + kairon/shared/account/processor.py | 3 + .../catalog_sync_log_processor.py | 320 +++ kairon/shared/catalog_sync/data_objects.py | 35 + kairon/shared/cognition/processor.py | 521 ++++- kairon/shared/constants.py | 7 + kairon/shared/data/constant.py | 22 + kairon/shared/data/data_models.py | 11 +- kairon/shared/data/data_objects.py | 31 + kairon/shared/data/utils.py | 17 + kairon/shared/llm/processor.py | 17 + kairon/shared/models.py | 4 +- kairon/shared/utils.py | 17 + metadata/catalog_provider_mappings.json | 60 + tests/integration_test/services_test.py | 1965 ++++++++++++++++- .../catalog_sync_item_toggle_payload.json | 14 + ...oggle_payload_invalid_missing_instock.json | 13 + ...toggle_payload_invalid_missing_itemid.json | 11 + ...le_payload_invalid_nonboolean_instock.json | 14 + .../catalog_sync_push_menu_payload.json | 270 +++ ...atalog_sync_push_menu_payload_invalid.json | 269 +++ ...nc_push_menu_payload_with_delete_data.json | 242 ++ .../catalog_sync_log_processor_test.py | 571 +++++ .../data_processor/data_processor_test.py | 437 +++- 34 files changed, 5312 insertions(+), 130 deletions(-) create mode 100644 kairon/api/app/routers/bot/integrations.py create mode 100644 kairon/catalog_sync/definitions/base.py create mode 100644 
kairon/catalog_sync/definitions/factory.py create mode 100644 kairon/events/definitions/catalog_sync.py create mode 100644 kairon/events/definitions/petpooja_sync.py create mode 100644 kairon/meta/processor.py create mode 100644 kairon/shared/catalog_sync/catalog_sync_log_processor.py create mode 100644 kairon/shared/catalog_sync/data_objects.py create mode 100644 metadata/catalog_provider_mappings.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_instock.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_itemid.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_nonboolean_instock.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_invalid.json create mode 100644 tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_with_delete_data.json create mode 100644 tests/unit_test/data_processor/catalog_sync_log_processor_test.py diff --git a/email.yaml b/email.yaml index 2600d61ba..817910984 100644 --- a/email.yaml +++ b/email.yaml @@ -25,4 +25,5 @@ email: book_a_demo_subject: ${EMAIL_TEMPLATES_BOOK_A_DEMO_SUBJECT:"kAIron Demo Requested"} member_left_bot_subject: ${EMAIL_TEMPLATES_USER_LEFT_BOT_SUBJECT:"User has left the BOT_NAME bot"} member_left_bot_mail_body: ${EMAIL_TEMPLATES_USER_LEFT_BOT_BODY:"User USER_NAME has left the BOT_NAME bot."} + catalog_sync_status_subject: ${EMAIL_TEMPLATES_CATALOG_SYNC_UPDATE:"Catalog Sync Update"} diff --git a/kairon/api/app/main.py b/kairon/api/app/main.py index 5c5211304..62f8065d9 100644 --- a/kairon/api/app/main.py +++ b/kairon/api/app/main.py @@ -23,7 +23,7 @@ from kairon.api.app.routers import auth, augment, history, user, account, idp, system from 
kairon.api.app.routers.bot import action, bot, agents, secrets, multilingual, metric, data, \ - channels, custom_widgets + channels, custom_widgets, integrations from kairon.api.models import Response from kairon.exceptions import AppException from kairon.shared.account.processor import AccountProcessor @@ -269,3 +269,4 @@ def healthcheck(): app.include_router(system.router, prefix="/api/system", tags=["Application"]) app.include_router(data.router, prefix="/api/bot/{bot}/data", tags=["File Upload/Download"]) app.include_router(custom_widgets.router, prefix="/api/bot/{bot}/widgets", tags=["Custom analytical widgets"]) +app.include_router(integrations.router, prefix="/api/bot/integration", tags=["Data Integrations"]) diff --git a/kairon/api/app/routers/bot/data.py b/kairon/api/app/routers/bot/data.py index 055da16ce..70d7b3eaa 100644 --- a/kairon/api/app/routers/bot/data.py +++ b/kairon/api/app/routers/bot/data.py @@ -1,7 +1,7 @@ import os -from typing import List +from typing import List, Text -from fastapi import UploadFile, File, Security, APIRouter, Query, HTTPException +from fastapi import UploadFile, File, Security, APIRouter, Query, HTTPException, Path from starlette.requests import Request from starlette.responses import FileResponse @@ -13,8 +13,9 @@ from kairon.shared.cognition.data_objects import CognitionSchema from kairon.shared.cognition.processor import CognitionDataProcessor from kairon.shared.concurrency.actors.factory import ActorFactory -from kairon.shared.constants import ActorType +from kairon.shared.constants import ActorType, CatalogSyncClass from kairon.shared.constants import DESIGNER_ACCESS +from kairon.shared.data.data_models import POSIntegrationRequest from kairon.shared.data.data_models import BulkDeleteRequest from kairon.shared.data.processor import MongoProcessor from kairon.shared.models import User @@ -342,7 +343,7 @@ async def download_error_csv( async def knowledge_vault_sync( primary_key_col: str, collection_name: str, - 
event_type: str, + sync_type: str, data: List[dict], current_user: User = Security(Authentication.get_current_user_and_bot, scopes=DESIGNER_ACCESS), ): @@ -351,7 +352,7 @@ async def knowledge_vault_sync( """ data = [{key.lower(): value for key, value in row.items()} for row in data] - error_summary = cognition_processor.validate_data(primary_key_col.lower(), collection_name.lower(), event_type.lower(), data, current_user.get_bot()) + error_summary = cognition_processor.validate_data(primary_key_col.lower(), collection_name.lower(), sync_type.lower(), data, current_user.get_bot()) if error_summary: return Response( @@ -361,11 +362,34 @@ async def knowledge_vault_sync( error_code=400 ) - await cognition_processor.upsert_data(primary_key_col.lower(), collection_name.lower(), event_type.lower(), data, + await cognition_processor.upsert_data(primary_key_col.lower(), collection_name.lower(), sync_type.lower(), data, current_user.get_bot(), current_user.get_user()) return Response( success=True, message="Processing completed successfully", data=None - ) \ No newline at end of file + ) + + +@router.post("/integrations/add", response_model=Response) +async def add_pos_integration_config( + request_data: POSIntegrationRequest, + sync_type: str, + current_user: User = Security(Authentication.get_current_user_and_bot, scopes=DESIGNER_ACCESS), +): + """ + Add data integration config + """ + CognitionDataProcessor.load_catalog_provider_mappings() + + if request_data.provider not in CatalogSyncClass.__members__.values(): + raise AppException("Invalid Provider") + + CognitionDataProcessor.add_bot_sync_config(request_data, current_user.get_bot(), current_user.get_user()) + + integration_endpoint = cognition_processor.save_pos_integration_config( + request_data.dict(), current_user.get_bot(), current_user.get_user(), sync_type + ) + + return Response(message='POS Integration Complete', data=integration_endpoint) \ No newline at end of file diff --git 
a/kairon/api/app/routers/bot/integrations.py b/kairon/api/app/routers/bot/integrations.py new file mode 100644 index 000000000..5b91652e0 --- /dev/null +++ b/kairon/api/app/routers/bot/integrations.py @@ -0,0 +1,85 @@ +from typing import Text + +from fastapi import Security, APIRouter, Path +from starlette.requests import Request + +from kairon.api.models import Response +from kairon.events.definitions.catalog_sync import CatalogSync +from kairon.exceptions import AppException +from kairon.shared.auth import Authentication +from kairon.shared.catalog_sync.data_objects import CatalogSyncLogs +from kairon.shared.cognition.processor import CognitionDataProcessor +from kairon.shared.constants import CatalogProvider +from kairon.shared.constants import DESIGNER_ACCESS +from kairon.shared.models import User +from kairon.shared.utils import MailUtility + +router = APIRouter() +cognition_processor = CognitionDataProcessor() + +@router.post("/{provider}/{sync_type}/{bot}/{token}", response_model=Response) +async def sync_data( + request: Request, + provider: CatalogProvider = Path(description="Catalog provider name", + examples=[CatalogProvider.PETPOOJA.value]), + bot: Text = Path(description="Bot id"), + sync_type: Text = Path(description="Sync Type"), + current_user: User = Security(Authentication.get_current_user_and_bot, scopes=DESIGNER_ACCESS), + token: str = Path(description="JWT token for authentication"), +): + """ + Handles incoming data from catalog_sync (e.g., Petpooja) for processing, validation, and eventual storage. + """ + + request_body = await request.json() + + event = CatalogSync( + bot=bot, + user=current_user.get_user(), + provider=provider, + sync_type=sync_type, + token=token + ) + + is_event_data = await event.validate(request_body=request_body) + if is_event_data is True: + event.enqueue() + return {"message": "Sync in progress! 
Check logs."} + else: + raise AppException(is_event_data) + + + +@router.post("/{provider}/{sync_type}/{bot}/{token}/{execution_id}", response_model=Response) +async def rerun_sync( + provider: CatalogProvider = Path(description="Catalog provider name", + examples=[CatalogProvider.PETPOOJA.value]), + bot: Text = Path(description="Bot id"), + sync_type: Text = Path(description="Sync Type"), + current_user: User = Security(Authentication.get_current_user_and_bot, scopes=DESIGNER_ACCESS), + token: str = Path(description="JWT token for authentication"), + execution_id: str = Path(description="Execution id"), +): + """ + Handles incoming data from catalog_sync (e.g., Petpooja) for processing, validation, and eventual storage. + """ + sync_log_entry = CatalogSyncLogs.objects(execution_id=execution_id).first() + if not sync_log_entry: + raise AppException(f"Sync log with execution ID {execution_id} not found.") + + request_body = sync_log_entry.raw_payload + + event = CatalogSync( + bot=bot, + user=current_user.get_user(), + provider=provider, + sync_type=sync_type, + token=token + ) + + is_event_data = await event.validate(request_body=request_body) + if is_event_data is True: + event.enqueue() + return {"message": "Sync in progress! 
Check logs."} + else: + raise AppException(is_event_data) \ No newline at end of file diff --git a/kairon/catalog_sync/definitions/base.py b/kairon/catalog_sync/definitions/base.py new file mode 100644 index 000000000..da3c6acc9 --- /dev/null +++ b/kairon/catalog_sync/definitions/base.py @@ -0,0 +1,18 @@ +from abc import abstractmethod + + +class CatalogSyncBase: + + """Base class to create events""" + + @abstractmethod + def validate(self): + raise NotImplementedError("Provider not implemented") + + @abstractmethod + def preprocess(self): + raise NotImplementedError("Provider not implemented") + + @abstractmethod + def execute(self, **kwargs): + raise NotImplementedError("Provider not implemented") \ No newline at end of file diff --git a/kairon/catalog_sync/definitions/factory.py b/kairon/catalog_sync/definitions/factory.py new file mode 100644 index 000000000..db2ca31f4 --- /dev/null +++ b/kairon/catalog_sync/definitions/factory.py @@ -0,0 +1,29 @@ +from kairon.events.definitions.petpooja_sync import PetpoojaSync +from kairon.exceptions import AppException +from kairon.shared.constants import CatalogSyncClass + + +class CatalogSyncFactory: + + __provider_implementations = { + CatalogSyncClass.petpooja: PetpoojaSync, + } + + @staticmethod + def get_instance(provider: str): + """ + Factory to retrieve catalog provider implementation for execution. + :param provider: catalog provider name (e.g., "petpooja") + :return: Corresponding Sync class + """ + try: + provider_enum = CatalogSyncClass(provider.lower()) + except ValueError: + valid_syncs = [sync.value for sync in CatalogSyncClass] + raise AppException(f"'{provider}' is not a valid catalog sync provider. 
Accepted types: {valid_syncs}") + + sync_class = CatalogSyncFactory.__provider_implementations.get(provider_enum) + if not sync_class: + raise AppException(f"No implementation found for provider '{provider}'.") + + return sync_class \ No newline at end of file diff --git a/kairon/events/definitions/catalog_sync.py b/kairon/events/definitions/catalog_sync.py new file mode 100644 index 000000000..942ccb236 --- /dev/null +++ b/kairon/events/definitions/catalog_sync.py @@ -0,0 +1,73 @@ +from typing import Text +from kairon import Utility +from loguru import logger + +from kairon.catalog_sync.definitions.factory import CatalogSyncFactory +from kairon.events.definitions.base import EventsBase +from kairon.shared.account.processor import AccountProcessor +from kairon.shared.constants import EventClass +from kairon.shared.data.constant import SyncType, SYNC_STATUS +from kairon.shared.catalog_sync.catalog_sync_log_processor import CatalogSyncLogProcessor + + +class CatalogSync(EventsBase): + """ + Validates and processes data from catalog (e.g., Petpooja) before importing it + to knowledge vault and meta + """ + + def __init__(self, bot: Text, user: Text, provider: Text, **kwargs): + """ + Initialise event. + """ + sync_class = CatalogSyncFactory.get_instance(provider) + self.catalog_sync = sync_class( + bot=bot, + user=user, + provider=provider, + sync_type=kwargs.get("sync_type", SyncType.item_toggle), + token=kwargs.get("token", "") + ) + self.catalog_sync.data = [] + + async def validate(self, **kwargs): + """ + Validates if an event is already running for that particular bot and + checks if the event trigger limit has been exceeded. 
+ Then, preprocesses the received request + """ + request = kwargs.get("request_body") + self.catalog_sync.data = request + is_event_data = await self.catalog_sync.validate(request_body = request) + return is_event_data + + def enqueue(self, **kwargs): + """ + Send event to event server + """ + try: + payload = { + 'bot': self.catalog_sync.bot, + 'user': self.catalog_sync.user, + 'provider': self.catalog_sync.provider, + 'sync_type': self.catalog_sync.sync_type, + 'token': self.catalog_sync.token, + 'data': self.catalog_sync.data + } + CatalogSyncLogProcessor.add_log(self.catalog_sync.bot, self.catalog_sync.user, self.catalog_sync.provider, self.catalog_sync.sync_type, sync_status=SYNC_STATUS.ENQUEUED.value) + Utility.request_event_server(EventClass.catalog_integration, payload) + except Exception as e: + CatalogSyncLogProcessor.delete_enqueued_event_log(self.catalog_sync.bot) + raise e + + async def execute(self, **kwargs): + """ + Execute the document content import event. + """ + AccountProcessor.load_system_properties() + self.catalog_sync.data = kwargs.get("data", []) + try: + initiate_import, stale_primary_keys= await self.catalog_sync.preprocess(request_body=self.catalog_sync.data) + await self.catalog_sync.execute(data=self.catalog_sync.data, initiate_import = initiate_import,stale_primary_keys = stale_primary_keys) + except Exception as e: + logger.error(str(e)) \ No newline at end of file diff --git a/kairon/events/definitions/factory.py b/kairon/events/definitions/factory.py index 31176f0cd..80c374c0c 100644 --- a/kairon/events/definitions/factory.py +++ b/kairon/events/definitions/factory.py @@ -1,4 +1,5 @@ from kairon.events.definitions.agentic_flow import AgenticFlowEvent +from kairon.events.definitions.catalog_sync import CatalogSync from kairon.events.definitions.content_importer import DocContentImporterEvent from kairon.events.definitions.data_importer import TrainingDataImporterEvent from kairon.events.definitions.faq_importer import 
FaqDataImporterEvent @@ -24,7 +25,8 @@ class EventFactory: EventClass.message_broadcast: MessageBroadcastEvent, EventClass.content_importer: DocContentImporterEvent, EventClass.mail_channel_read_mails: MailReadEvent, - EventClass.agentic_flow: AgenticFlowEvent + EventClass.agentic_flow: AgenticFlowEvent, + EventClass.catalog_integration: CatalogSync } @staticmethod diff --git a/kairon/events/definitions/petpooja_sync.py b/kairon/events/definitions/petpooja_sync.py new file mode 100644 index 000000000..9b885c0b0 --- /dev/null +++ b/kairon/events/definitions/petpooja_sync.py @@ -0,0 +1,197 @@ +from typing import Text + +from dotenv import set_key + +from kairon import Utility +from loguru import logger + +from kairon.catalog_sync.definitions.base import CatalogSyncBase +from kairon.exceptions import AppException +from kairon.meta.processor import MetaProcessor +from kairon.shared.cognition.processor import CognitionDataProcessor +from kairon.shared.constants import EventClass +from kairon.shared.data.constant import SyncType, SYNC_STATUS +from kairon.shared.data.data_objects import POSIntegrations, BotSyncConfig +from kairon.shared.catalog_sync.catalog_sync_log_processor import CatalogSyncLogProcessor +from kairon.shared.utils import MailUtility + + +class PetpoojaSync(CatalogSyncBase): + """ + Validates and processes data from catalog (e.g., Petpooja) before importing it + to knowledge vault and meta + """ + + def __init__(self, bot: Text, user: Text, provider: Text, **kwargs): + """ + Initialise event. + """ + self.bot = bot + self.user = user + self.provider = provider + self.token = kwargs.get("token", "") + self.sync_type = kwargs.get("sync_type", SyncType.item_toggle) + self.data = [] + + async def validate(self, **kwargs): + """ + Validates if an event is already running for that particular bot and + checks if the event trigger limit has been exceeded. 
+ Then, preprocesses the received request + """ + try: + request = kwargs.get("request_body") + CatalogSyncLogProcessor.is_sync_in_progress(self.bot) + CatalogSyncLogProcessor.is_limit_exceeded(self.bot) + CatalogSyncLogProcessor.add_log(self.bot, self.user, self.provider, self.sync_type, + sync_status=SYNC_STATUS.INITIATED.value, raw_payload=request) + + CatalogSyncLogProcessor.is_sync_type_allowed(self.bot, self.sync_type) + if not CatalogSyncLogProcessor.is_catalog_collection_exists(self.bot) and CatalogSyncLogProcessor.is_ai_enabled(self.bot): + CatalogSyncLogProcessor.create_catalog_collection(bot=self.bot, user=self.user) + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=SYNC_STATUS.VALIDATING_REQUEST) + if self.sync_type == SyncType.push_menu: + CatalogSyncLogProcessor.validate_item_ids(request) + CatalogSyncLogProcessor.validate_item_fields(self.bot, request, self.provider) + CatalogSyncLogProcessor.validate_image_configurations(self.bot, self.user) + else: + CatalogSyncLogProcessor.validate_item_toggle_request(request) + return True + except Exception as e: + execution_id = CatalogSyncLogProcessor.get_execution_id_for_bot(self.bot) + await MailUtility.format_and_send_mail( + mail_type="catalog_sync_status", email="himanshu.gupta@nimblework.com", bot = self.bot, executionID = execution_id, + sync_status=SYNC_STATUS.VALIDATING_FAILED, message = str(e), first_name = "HG" + ) + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=SYNC_STATUS.FAILED.value, + exception=str(e), + status="Failure") + return str(e) + + async def preprocess(self, **kwargs): + """ + Transform and preprocess incoming payload data into `self.data` + for catalog sync and meta sync. 
+ """ + sync_status = SYNC_STATUS.VALIDATING_REQUEST_SUCCESS + try: + cognition_processor = CognitionDataProcessor() + sync_status=SYNC_STATUS.PREPROCESSING + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=sync_status) + request = kwargs.get("request_body") + if self.sync_type == SyncType.push_menu: + self.data = cognition_processor.preprocess_push_menu_data(self.bot, request, self.provider) + else: + self.data = cognition_processor.preprocess_item_toggle_data(self.bot, request, self.provider) + sync_status = SYNC_STATUS.PREPROCESSING_COMPLETED + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=sync_status, processed_payload= self.data) + stale_primary_keys = CognitionDataProcessor.save_ai_data(self.data, self.bot, self.user, self.sync_type) + initiate_import = True + if CatalogSyncLogProcessor.is_ai_enabled(self.bot): + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(self.bot) + catalog_name = f"{restaurant_name}_{branch_name}_catalog" + sync_status = SYNC_STATUS.VALIDATING_KNOWLEDGE_VAULT_DATA + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=sync_status) + error_summary = cognition_processor.validate_data("id", catalog_name, + self.sync_type.lower(), self.data.get("kv", []), self.bot) + if error_summary: + initiate_import = False + sync_status = SYNC_STATUS.SAVE.value + CatalogSyncLogProcessor.add_log(self.bot, self.user, validation_errors=error_summary, + sync_status=sync_status, status="Failure") + return initiate_import, stale_primary_keys + except Exception as e: + execution_id = CatalogSyncLogProcessor.get_execution_id_for_bot(self.bot) + await MailUtility.format_and_send_mail( + mail_type="catalog_sync_status", email="himanshu.gupta@nimblework.com", bot=self.bot, + executionID=execution_id, + sync_status=sync_status, message=str(e), first_name="HG" + ) + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=SYNC_STATUS.FAILED.value, + exception=str(e), + 
status="Failure") + return None + + + async def execute(self, **kwargs): + """ + Execute the document content import event. + """ + self.data = kwargs.get("data", {}) + cognition_processor = CognitionDataProcessor() + initiate_import = kwargs.get("initiate_import", False) + stale_primary_keys = kwargs.get("stale_primary_keys") + status = "Failure" + sync_status = SYNC_STATUS.PREPROCESSING_COMPLETED + try: + knowledge_vault_data = self.data.get("kv", []) + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(self.bot) + catalog_name = f"{restaurant_name}_{branch_name}_catalog" + + if not CatalogSyncLogProcessor.is_ai_enabled(self.bot) and not CatalogSyncLogProcessor.is_meta_enabled(self.bot): + CatalogSyncLogProcessor.add_log(self.bot, self.user, + exception="Sync to knowledge vault and Meta is not allowed for this bot. Contact Support!!", + status="Success") + raise Exception("Sync to knowledge vault and Meta is not allowed for this bot. Contact Support!!") + + if initiate_import and CatalogSyncLogProcessor.is_ai_enabled(self.bot): + result = await cognition_processor.upsert_data("id", catalog_name, + self.sync_type.lower(), knowledge_vault_data, + self.bot, self.user) + stale_primary_keys = result.get("stale_ids") + else: + sync_status = SYNC_STATUS.COMPLETED.value + CatalogSyncLogProcessor.add_log(self.bot, self.user, + exception="Sync to knowledge vault is not allowed for this bot. Contact Support!!", + status="Success") + + if not CatalogSyncLogProcessor.is_meta_enabled(self.bot): + sync_status = SYNC_STATUS.COMPLETED.value + CatalogSyncLogProcessor.add_log(self.bot, self.user, + exception="Sync to Meta is not allowed for this bot. 
Contact Support!!", + status="Success") + + integrations_doc = POSIntegrations.objects(bot=self.bot, provider=self.provider, + sync_type=self.sync_type).first() + if integrations_doc and 'meta_config' in integrations_doc: + sync_status=SYNC_STATUS.SAVE_META.value + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=sync_status) + meta_processor = MetaProcessor(integrations_doc.meta_config.get('access_token'), + integrations_doc.meta_config.get('catalog_id')) + + meta_payload = self.data.get("meta", []) + if self.sync_type == SyncType.push_menu: + meta_processor.preprocess_data(self.bot, meta_payload, "CREATE", self.provider) + await meta_processor.push_meta_catalog() + if stale_primary_keys: + delete_payload = meta_processor.preprocess_delete_data(stale_primary_keys) + await meta_processor.delete_meta_catalog(delete_payload) + status = "Success" + else: + meta_processor.preprocess_data(self.bot, meta_payload, "UPDATE", self.provider) + await meta_processor.update_meta_catalog() + status = "Success" + execution_id = CatalogSyncLogProcessor.get_execution_id_for_bot(self.bot) + sync_status=SYNC_STATUS.COMPLETED.value + await MailUtility.format_and_send_mail( + mail_type="catalog_sync_status", email="himanshu.gupta@nimblework.com", bot=self.bot, + executionID=execution_id, + sync_status=sync_status, message="Catalog has been synced successfully", first_name="HG" + ) + CatalogSyncLogProcessor.add_log(self.bot, self.user, sync_status=sync_status, status=status) + except Exception as e: + print(str(e)) + execution_id = CatalogSyncLogProcessor.get_execution_id_for_bot(self.bot) + await MailUtility.format_and_send_mail( + mail_type="catalog_sync_status", email="himanshu.gupta@nimblework.com", bot=self.bot, + executionID=execution_id, + sync_status=sync_status, message=str(e), first_name="HG" + ) + if not CatalogSyncLogProcessor.is_meta_enabled(self.bot) and not CatalogSyncLogProcessor.is_ai_enabled(self.bot): + CatalogSyncLogProcessor.add_log(self.bot, 
self.user, + sync_status=SYNC_STATUS.COMPLETED.value) + else: + CatalogSyncLogProcessor.add_log(self.bot, self.user, + exception=str(e), + status="Failure", + sync_status=sync_status) \ No newline at end of file diff --git a/kairon/meta/processor.py b/kairon/meta/processor.py new file mode 100644 index 000000000..7073e790d --- /dev/null +++ b/kairon/meta/processor.py @@ -0,0 +1,122 @@ +import asyncio +import json +from typing import Text, List +from urllib.parse import urljoin +from loguru import logger +import requests +from kairon import Utility +from kairon.shared.catalog_sync.data_objects import CatalogProviderMapping +from kairon.shared.rest_client import AioRestClient +from urllib.parse import quote + + +class MetaProcessor: + + def __init__(self, access_token: Text, catalog_id:Text): + self.catalog_id = catalog_id + self.access_token = access_token + self.headers = {} + self.processed_data = [] + + def preprocess_data(self,bot: Text, data: List[dict], method: Text, provider: str): + doc = CatalogProviderMapping.objects(provider=provider).first() + if not doc: + raise Exception(f"Metadata mappings not found for provider={provider}") + + meta_fields = list(doc.meta_mappings.keys()) + + for item in data: + transformed_item = {"retailer_id": item["id"]} + + if method == "UPDATE": + transformed_item["data"] = {} + for field in meta_fields: + if field in item: + value = int(item[field]) if field == "price" else item[field] + transformed_item["data"][field] = value + + else: + transformed_item["data"] = {"currency": "INR"} + for field in meta_fields: + if field in item: + value = int(item[field]) if field == "price" else item[field] + transformed_item["data"][field] = value + + transformed_item["method"] = method + transformed_item["item_type"] = "PRODUCT_ITEM" + self.processed_data.append(transformed_item) + + return self.processed_data + + def preprocess_delete_data(self, remaining_ids: List): + """ + Creates a payload for deleting stale records from the catalog. 
+ Args: + remaining_ids: List of primary keys that need to be deleted. + Returns: + Dict: Payload containing the list of delete operations. + """ + return [{"retailer_id": id, "method": "DELETE"} for id in remaining_ids] + + async def push_meta_catalog(self): + """ + Sync the data to meta when event type is 'push_menu' + """ + try: + req = quote(json.dumps(self.processed_data)) + base_url = f"https://graph.facebook.com/v21.0/{self.catalog_id}/batch" + url = f"{base_url}?item_type=PRODUCT_ITEM&requests={req}" + + data = { + "access_token": self.access_token, + } + + response = await asyncio.to_thread(requests.post, url, headers={}, data=data) + response.raise_for_status() + print("Response JSON:", response.json()) + print("Successfully synced product items to Meta catalog(Push Menu)") + except Exception as e: + logger.exception(f"Error syncing product items to Meta catalog for push menu: {str(e)}") + raise e + + async def update_meta_catalog(self): + """ + Sync the data to meta when event type is 'push_menu' + """ + try: + req = quote(json.dumps(self.processed_data)) + base_url = f"https://graph.facebook.com/v21.0/{self.catalog_id}/batch" + url = f"{base_url}?item_type=PRODUCT_ITEM&requests={req}" + + data = { + "access_token": self.access_token, + } + + response = await asyncio.to_thread(requests.post, url, headers={}, data=data) + response.raise_for_status() + print("Response JSON:", response.json()) + print("Successfully synced product items to Meta catalog(Item Toggle)") + except Exception as e: + logger.exception(f"Error syncing product items to Meta catalog for item toggle: {str(e)}") + raise e + + + async def delete_meta_catalog(self, delete_payload: list): + """ + Sync the data to meta when event type is 'push_menu' + """ + try: + req = quote(json.dumps(delete_payload)) + base_url = "https://graph.facebook.com/v21.0/1880697869060042/batch" + url = f"{base_url}?requests={req}" + + data = { + "access_token": self.access_token, + } + response = await 
asyncio.to_thread(requests.post, url, headers={}, data=data) + response.raise_for_status() + print("Response JSON:", response.json()) + print("Successfully deleted data from meta.") + except Exception as e: + print(f"Error deleting data from meta: {e}") + raise e \ No newline at end of file diff --git a/kairon/shared/account/data_objects.py b/kairon/shared/account/data_objects.py index e5deb18c7..1e4cd8e83 100644 --- a/kairon/shared/account/data_objects.py +++ b/kairon/shared/account/data_objects.py @@ -155,6 +155,7 @@ class MailTemplates(EmbeddedDocument): add_trusted_device = StringField() button_template = StringField() leave_bot_owner_notification = StringField() + catalog_sync_status = StringField() class SystemProperties(Document): diff --git a/kairon/shared/account/processor.py b/kairon/shared/account/processor.py index ff9561031..168fa1a19 100644 --- a/kairon/shared/account/processor.py +++ b/kairon/shared/account/processor.py @@ -959,6 +959,7 @@ def load_system_properties(): ).read(), button_template=open("template/emails/button.html", "r").read(), leave_bot_owner_notification=open("template/emails/leaveBotOwnerNotification.html", "r").read(), + catalog_sync_status=open("template/emails/catalog_sync_status.html", "r").read(), ) system_properties = ( SystemProperties(mail_templates=mail_templates) @@ -1013,6 +1014,8 @@ def load_system_properties(): ]["button_template"] Utility.email_conf["email"]["templates"]["leave_bot_owner_notification"] = system_properties["mail_templates"][ "leave_bot_owner_notification"] + Utility.email_conf["email"]["templates"]["catalog_sync_status"] = system_properties["mail_templates"][ + "catalog_sync_status"] @staticmethod async def confirm_email(token: str): diff --git a/kairon/shared/catalog_sync/catalog_sync_log_processor.py b/kairon/shared/catalog_sync/catalog_sync_log_processor.py new file mode 100644 index 000000000..242742765 --- /dev/null +++ b/kairon/shared/catalog_sync/catalog_sync_log_processor.py @@ -0,0 +1,320 @@ 
import json
from datetime import datetime
from typing import List

from bson import ObjectId
from loguru import logger
from mongoengine import Q, DoesNotExist

from kairon.shared.cognition.data_objects import CognitionSchema, ColumnMetadata, CollectionData
from kairon.shared.cognition.processor import CognitionDataProcessor
from kairon.shared.content_importer.data_objects import ContentValidationLogs
from kairon.shared.data.constant import SYNC_STATUS, SyncType
from kairon.shared.data.data_models import CognitionSchemaRequest
from kairon.shared.data.data_objects import BotSettings, BotSyncConfig
from kairon.shared.data.processor import MongoProcessor
from kairon.shared.catalog_sync.data_objects import CatalogSyncLogs, CatalogProviderMapping
from kairon.shared.models import CognitionMetadataType


class CatalogSyncLogProcessor:
    """
    Log processor for catalog sync events (POS provider -> kairon / Meta).
    """

    @staticmethod
    def add_log(bot: str, user: str, provider: str = None, sync_type: str = None, validation_errors: dict = None,
                raw_payload: dict = None, processed_payload: dict = None, exception: str = None, status: str = None,
                sync_status: str = SYNC_STATUS.INITIATED.value):
        """
        Adds or updates a log entry for a catalog sync event.

        @param bot: bot id.
        @param user: kairon username.
        @param provider: catalog provider (e.g. petpooja).
        @param sync_type: sync type (push_menu / item_toggle).
        @param validation_errors: dictionary containing any validation errors encountered.
        @param raw_payload: payload as received from the provider.
        @param processed_payload: payload after preprocessing, if available.
        @param exception: exception occurred during event, if any.
        @param status: validation success or failure.
        @param sync_status: current stage of the sync lifecycle.
        @return: None
        """
        try:
            # Re-use the currently active (neither completed nor failed) log for this bot.
            # NOTE(review): unlike is_sync_in_progress, ABORTED logs are not excluded here,
            # so an aborted run's log could be picked up and updated — confirm intended.
            doc = CatalogSyncLogs.objects(bot=bot).filter(
                Q(sync_status__ne=SYNC_STATUS.COMPLETED.value) &
                Q(sync_status__ne=SYNC_STATUS.FAILED.value)).get()
        except DoesNotExist:
            doc = CatalogSyncLogs(
                bot=bot,
                user=user,
                execution_id=str(ObjectId()),
                provider=provider,
                raw_payload=raw_payload,
                sync_type=sync_type,
                start_timestamp=datetime.utcnow()
            )
        doc.sync_status = sync_status
        if processed_payload:
            doc.processed_payload = processed_payload
        if exception:
            doc.exception = exception
        if status:
            doc.status = status
        if validation_errors:
            doc.validation_errors = validation_errors
        # A terminal status closes out the run.
        if sync_status in {SYNC_STATUS.FAILED.value, SYNC_STATUS.COMPLETED.value}:
            doc.end_timestamp = datetime.utcnow()
        doc.save()

    @staticmethod
    def is_sync_in_progress(bot: str, raise_exception=True):
        """
        Checks if a catalog sync event is in progress for the bot.

        @param bot: bot id
        @param raise_exception: Raise exception if event is in progress.
        @return: boolean flag.
        """
        in_progress = False
        try:
            # Any log that is not completed/failed/aborted counts as active.
            CatalogSyncLogs.objects(bot=bot).filter(
                Q(sync_status__ne=SYNC_STATUS.COMPLETED.value) &
                Q(sync_status__ne=SYNC_STATUS.FAILED.value) &
                Q(sync_status__ne=SYNC_STATUS.ABORTED.value)).get()

            if raise_exception:
                raise Exception("Sync already in progress! Check logs.")
            in_progress = True
        except DoesNotExist as e:
            logger.error(e)
        return in_progress

    @staticmethod
    def is_limit_exceeded(bot: str, raise_exception=True):
        """
        Checks if the daily catalog sync triggering limit has been exceeded.

        @param bot: bot id.
        @param raise_exception: Raise exception if limit is reached.
        @return: boolean flag
        """
        today = datetime.today()
        # Fixed: also zero out microseconds so the window truly starts at midnight.
        today_start = today.replace(hour=0, minute=0, second=0, microsecond=0)
        doc_count = CatalogSyncLogs.objects(
            bot=bot, start_timestamp__gte=today_start
        ).count()
        if doc_count >= BotSettings.objects(bot=bot).get().catalog_sync_limit_per_day:
            if raise_exception:
                raise Exception("Daily limit exceeded.")
            else:
                return True
        else:
            return False

    @staticmethod
    def get_logs(bot: str, start_idx: int = 0, page_size: int = 10):
        """
        Get logs for catalog sync events, newest first.

        @param bot: bot id.
        @param start_idx: start index
        @param page_size: page size
        @return: generator of log dicts (internal identifiers stripped).
        """
        for log in CatalogSyncLogs.objects(bot=bot).order_by("-start_timestamp").skip(start_idx).limit(page_size):
            log = log.to_mongo().to_dict()
            log.pop('_id')
            log.pop('bot')
            log.pop('user')
            yield log

    @staticmethod
    def delete_enqueued_event_log(bot: str):
        """
        Deletes the latest log if it is present in enqueued state.
        """
        latest_log = CatalogSyncLogs.objects(bot=bot).order_by('-id').first()
        if latest_log and latest_log.sync_status == SYNC_STATUS.ENQUEUED.value:
            latest_log.delete()

    @staticmethod
    def is_catalog_collection_exists(bot: str) -> bool:
        """
        Checks if the "<restaurant>_<branch>_catalog" collection exists in CognitionSchema for the given bot.
        """
        restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot)
        catalog_name = f"{restaurant_name}_{branch_name}_catalog"
        return CognitionSchema.objects(bot=bot, collection_name=catalog_name).first() is not None

    @staticmethod
    def create_catalog_collection(bot: str, user: str):
        """
        Creates the "<restaurant>_<branch>_catalog" collection in CognitionSchema for the
        given bot, with predefined catalog metadata fields.

        @return: id of the saved schema document.
        """
        cognition_processor = CognitionDataProcessor()
        # Column names and their data types for the catalog schema.
        column_definitions = [
            ("id", CognitionMetadataType.str.value),
            ("title", CognitionMetadataType.str.value),
            ("description", CognitionMetadataType.str.value),
            ("price", CognitionMetadataType.float.value),
            ("facebook_product_category", CognitionMetadataType.str.value),
            ("availability", CognitionMetadataType.str.value),
        ]

        # Raise the per-collection column limit (catalog needs 6 columns) and turn on
        # FAQ so the catalog can be used for retrieval.
        bot_settings = BotSettings.objects(bot=bot).first()
        if bot_settings:
            bot_settings.cognition_columns_per_collection_limit = 10
            bot_settings.llm_settings['enable_faq'] = True
            bot_settings.save()

        metadata = [
            {
                "column_name": col,
                "data_type": data_type,
                "enable_search": True,
                "create_embeddings": True
            }
            for col, data_type in column_definitions
        ]
        restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot)
        catalog_name = f"{restaurant_name}_{branch_name}_catalog"
        catalog_schema = CognitionSchemaRequest(
            collection_name=catalog_name,
            metadata=metadata
        )

        metadata_id = cognition_processor.save_cognition_schema(
            catalog_schema.dict(),
            user, bot)

        return metadata_id

    @staticmethod
    def validate_item_ids(json_data):
        """
        Validates that every entry in json_data["items"] has an 'itemid'.

        Raises an exception naming the offending item if 'itemid' is missing.
        (Fixed docstring: this method returns None; it does not return category ids.)
        """
        for item in json_data.get("items", []):
            if "itemid" not in item:
                raise Exception(f"Missing 'itemid' in item: {item}")

    @staticmethod
    def validate_item_toggle_request(json_data: dict) -> None:
        """
        Validates that `inStock` exists and is a boolean,
        and `itemID` exists in the payload body.

        Raises:
            Exception: If any validation fails.
        """
        body = json_data.get("body", {})

        if "inStock" not in body:
            raise Exception("Missing required field: 'inStock'")
        if not isinstance(body["inStock"], bool):
            raise Exception("'inStock' must be a boolean (true or false)")

        if "itemID" not in body:
            raise Exception("Missing required field: 'itemID'")

    @staticmethod
    def validate_item_fields(bot, json_data, provider):
        """
        Validates that each item has the required source fields as defined in the
        provider's mapping metadata, and that 'item_categoryid' (when present) refers
        to a declared category. Intended for push_menu payloads.
        """
        doc = CatalogProviderMapping.objects(provider=provider).first()
        if not doc:
            # Fixed message: "not found and provider" -> "not found for provider"
            # (consistent with the same check in CognitionDataProcessor).
            raise Exception(f"Metadata mappings not found for provider={provider}")

        provider_mappings = {
            "meta": doc.meta_mappings,
            "kv": doc.kv_mappings
        }

        valid_category_ids = {cat["categoryid"] for cat in json_data.get("categories", [])}

        # Union of every source field referenced by either sync target's mapping.
        required_fields = set()
        for system_fields in provider_mappings.values():
            for config in system_fields.values():
                source_field = config.get("source")
                if source_field:
                    required_fields.add(source_field)

        for item in json_data.get("items", []):
            missing_fields = [field for field in required_fields if field not in item]
            if missing_fields:
                raise Exception(f"Missing fields {missing_fields} in item: {item}")

            if "item_categoryid" in item and item["item_categoryid"] not in valid_category_ids:
                raise Exception(f"Invalid 'item_categoryid' {item['item_categoryid']} in item: {item}")

    @staticmethod
    def is_sync_type_allowed(bot: str, sync_type: str):
        """Raises if the requested sync type is disabled in the bot's sync config."""
        config = BotSyncConfig.objects(branch_bot=bot).first()
        if not config:
            raise Exception("No bot sync config found for bot")

        if sync_type == SyncType.push_menu and not config.process_push_menu:
            raise Exception("Push menu processing is disabled for this bot")

        if sync_type == SyncType.item_toggle and not config.process_item_toggle:
            raise Exception("Item toggle is disabled for this bot")

    @staticmethod
    def is_ai_enabled(bot: str):
        """Returns whether knowledge-vault (AI) sync is enabled for the bot."""
        config = BotSyncConfig.objects(branch_bot=bot).first()
        if not config:
            raise Exception("No bot sync config found for bot")
        return config.ai_enabled

    @staticmethod
    def is_meta_enabled(bot: str):
        """Returns whether Meta catalog sync is enabled for the bot."""
        config = BotSyncConfig.objects(branch_bot=bot).first()
        if not config:
            raise Exception("No bot sync config found for bot")
        return config.meta_enabled

    @staticmethod
    def validate_image_configurations(bot: str, user: str):
        """
        Ensures the "<restaurant>_<branch>_catalog_images" collection has a global
        fallback image document with a non-empty image_url; creates an empty
        placeholder document first if the collection has none.

        Raises:
            Exception: if the global fallback document or its image URL is missing.
        """
        restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot)
        catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images"

        if not CollectionData.objects(bot=bot, collection_name=catalog_images_collection).first():
            global_fallback_data = {
                "image_type": "global",
                "image_url": "",
                "image_base64": ""
            }
            CollectionData(
                collection_name=catalog_images_collection,
                data=global_fallback_data,
                user=user,
                bot=bot,
                status=True,
                timestamp=datetime.utcnow()
            ).save()

        document = CollectionData.objects(
            collection_name=catalog_images_collection,
            bot=bot,
            data__image_type="global"
        ).first()

        if not document:
            raise Exception(
                f"Global fallback image document not found in `{catalog_images_collection}`")

        if not document.data.get("image_url"):
            raise Exception(
                f"Global fallback image URL not found")

    @staticmethod
    def get_execution_id_for_bot(bot: str):
        """Returns the execution_id of the bot's currently active sync log, if any."""
        doc = CatalogSyncLogs.objects(bot=bot).filter(
            Q(sync_status__ne=SYNC_STATUS.COMPLETED.value) &
            Q(sync_status__ne=SYNC_STATUS.FAILED.value)
        ).order_by('-start_timestamp').first()

        return doc.execution_id if doc else None
from datetime import datetime

from mongoengine import StringField, BooleanField, DateTimeField, DynamicDocument, DictField, ListField

from kairon.shared.data.signals import push_notification, auditlogger


@auditlogger.log
@push_notification.apply
class CatalogSyncLogs(DynamicDocument):
    """One document per catalog sync execution; tracks payloads, status and timing."""

    execution_id = StringField(required=True, unique=True)
    raw_payload = DictField(required=True)
    processed_payload = DictField(default=None)
    validation_errors = DictField(default={})
    exception = StringField(default="")
    bot = StringField(required=True)
    user = StringField(required=True)
    provider = StringField(required=True)
    sync_type = StringField(required=True)
    start_timestamp = DateTimeField(default=datetime.utcnow)
    end_timestamp = DateTimeField(default=None)
    status = StringField(default=None)
    # NOTE(review): default "COMPLETED" looks unintended for a freshly created log
    # (CatalogSyncLogProcessor.add_log always sets sync_status explicitly) — confirm.
    sync_status = StringField(default="COMPLETED")

    # Fixed: the index spec referenced non-existent fields `event_id` and
    # `event_status`; the document defines `execution_id` and `sync_status`, so the
    # declared indexes could never serve the queries in CatalogSyncLogProcessor.
    meta = {"indexes": [{"fields": ["bot", "execution_id", ("bot", "sync_status", "-start_timestamp")]}]}


@auditlogger.log
@push_notification.apply
class CatalogProviderMapping(DynamicDocument):
    """
    Stores field mappings (meta and kv) for each catalog provider.
    """
    provider = StringField(required=True)
    meta_mappings = DictField(default=dict)
    kv_mappings = DictField(default=dict)
expected schema from column_dict. Args: data: List of dictionaries where each dictionary represents a row to be validated. collection_name: The name of the collection (table name). - event_type: The type of the event being validated. + sync_type: The type of the event being validated. bot: The bot identifier. primary_key_col: The primary key column for identifying rows. Returns: Dict: Summary of validation errors, if any. """ - self._validate_event_type(event_type) - event_validations = VaultSyncEventType[event_type].value + self._validate_sync_type(sync_type) + event_validations = VaultSyncType[sync_type].value self._validate_collection_exists(collection_name) column_dict = MongoProcessor().get_column_datatype_dict(bot, collection_name) @@ -518,10 +527,9 @@ def validate_data(self, primary_key_col: str, collection_name: str, event_type: "expected_columns": list(column_dict.keys()), "actual_columns": list(row.keys()) }) - if "invalid_columns" in event_validations: expected_columns = list(column_dict.keys()) - if event_type == "field_update": + if sync_type == VaultSyncType.item_toggle.name: expected_columns = [primary_key_col + " + any from " + str([col for col in column_dict.keys() if col != primary_key_col])] if not set(row.keys()).issubset(set(column_dict.keys())): row_errors.append({ @@ -567,19 +575,244 @@ def validate_data(self, primary_key_col: str, collection_name: str, event_type: return error_summary - async def upsert_data(self, primary_key_col: str, collection_name: str, event_type: str, data: List[Dict], bot: str, user: Text): + # async def upsert_data(self, primary_key_col: str, collection_name: str, sync_type: str, data: List[Dict], bot: str, user: Text): + # """ + # Upserts data into the CognitionData collection. + # If document with the primary key exists, it will be updated. + # If not, it will be inserted. + # + # Args: + # primary_key_col: The primary key column name to check for uniqueness. + # collection_name: The collection name (table). 
+ # sync_type: The type of the event being upserted + # data: List of rows of data to upsert. + # bot: The bot identifier associated with the data. + # user: The user + # """ + # + # from kairon.shared.llm.processor import LLMProcessor + # llm_processor = LLMProcessor(bot, DEFAULT_LLM) + # suffix = "_faq_embd" + # qdrant_collection = f"{bot}_{collection_name}{suffix}" if collection_name else f"{bot}{suffix}" + # + # if await llm_processor.__collection_exists__(qdrant_collection) is False: + # await llm_processor.__create_collection__(qdrant_collection) + # + # existing_documents = CognitionData.objects(bot=bot, collection=collection_name).as_pymongo() + # + # existing_document_map = { + # doc["data"].get(primary_key_col): doc for doc in existing_documents + # } + # + # for row in data: + # primary_key_value = row.get(primary_key_col) + # + # existing_document = existing_document_map.get(primary_key_value) + # + # if sync_type == "item_toggle" and existing_document: + # existing_data = existing_document.get("data", {}) + # merged_data = {**existing_data, **row} + # logger.debug(f"Merged row for {primary_key_col} {primary_key_value}: {merged_data}") + # else: + # merged_data = row + # + # payload = { + # "data": merged_data, + # "content_type": CognitionDataType.json.value, + # "collection": collection_name + # } + # + # if existing_document: + # row_id = str(existing_document["_id"]) + # self.update_cognition_data(row_id, payload, user, bot) + # updated_document = CognitionData.objects(id=row_id).first() + # if not isinstance(updated_document, dict): + # updated_document = updated_document.to_mongo().to_dict() + # logger.info(f"Row with {primary_key_col}: {primary_key_value} updated in MongoDB") + # await self.sync_with_qdrant(llm_processor, qdrant_collection, bot, updated_document, user, + # primary_key_col) + # else: + # row_id = self.save_cognition_data(payload, user, bot) + # new_document = CognitionData.objects(id=row_id).first() + # if not 
isinstance(new_document, dict): + # new_document = new_document.to_mongo().to_dict() + # logger.info(f"Row with {primary_key_col}: {primary_key_value} inserted in MongoDB") + # await self.sync_with_qdrant(llm_processor, qdrant_collection, bot, new_document, user, primary_key_col) + # + # return {"message": "Upsert complete!"} + + async def sync_with_qdrant(self, llm_processor, collection_name, bot, document, user, primary_key_col): """ - Upserts data into the CognitionData collection. - If document with the primary key exists, it will be updated. - If not, it will be inserted. + Syncs a document with Qdrant vector database by generating embeddings and upserting them. + + Args: + llm_processor (LLMProcessor): Instance of LLMProcessor for embedding and Qdrant operations. + collection_name (str): Name of the Qdrant collection. + bot (str): Bot identifier. + document (CognitionData): Document to sync with Qdrant. + user (Text): User performing the operation. + + Raises: + AppException: If Qdrant upsert operation fails. + """ + try: + metadata = self.find_matching_metadata(bot, document['data'], document.get('collection')) + search_payload, embedding_payload = Utility.retrieve_search_payload_and_embedding_payload( + document['data'], metadata) + embeddings = await llm_processor.get_embedding(embedding_payload, user, invocation='knowledge_vault_sync') + points = [{'id': document['vector_id'], 'vector': embeddings, 'payload': search_payload}] + await llm_processor.__collection_upsert__(collection_name, {'points': points}, + err_msg="Unable to train FAQ! 
Contact support") + logger.info(f"Row with {primary_key_col}: {document['data'].get(primary_key_col)} upserted in Qdrant.") + except Exception as e: + raise AppException(f"Failed to sync document with Qdrant: {str(e)}") + + def _validate_sync_type(self, sync_type: str): + if sync_type not in VaultSyncType.__members__.keys(): + raise AppException("Sync type does not exist") + + def _validate_collection_exists(self, collection_name: str): + if not CognitionSchema.objects(collection_name=collection_name).first(): + raise AppException(f"Collection '{collection_name}' does not exist.") + + + def save_pos_integration_config(self, configuration: Dict, bot: Text, user: Text, sync_type: Text = None): + """ + save or updates data integration configuration + :param configuration: config dict + :param bot: bot id + :param user: user id + :param sync_type: event type + :return: None + """ + self._validate_sync_type(sync_type) + try: + integration = POSIntegrations.objects(bot= bot, provider = configuration['provider'], sync_type = sync_type).get() + integration.config = configuration['config'] + integration.meta_config = configuration['meta_config'] + except DoesNotExist: + integration = POSIntegrations(**configuration) + integration.bot = bot + integration.user = user + integration.sync_type = sync_type + integration.timestamp = datetime.utcnow() + + if 'meta_config' in configuration: + integration.meta_config = configuration['meta_config'] + + integration.save() + integration_endpoint = DataUtility.get_integration_endpoint(integration) + return integration_endpoint + + + @staticmethod + def preprocess_push_menu_data(bot, json_data, provider): + """ + Preprocess the JSON data received from Petpooja to extract relevant fields for knowledge base or meta synchronization. + Handles different event types ("push_menu" vs others) and uses metadata to drive the field extraction and defaulting. 
+ """ + doc = CatalogProviderMapping.objects(provider=provider).first() + if not doc: + raise Exception(f"Metadata mappings not found for provider={provider}") + + category_map = { + cat["categoryid"]: cat["categoryname"] + for cat in json_data.get("categories", []) + } + + provider_mappings = { + "meta": doc.meta_mappings, + "kv": doc.kv_mappings + } + + data = {sync_target: [] for sync_target in provider_mappings} + for item in json_data.get("items", []): + for sync_target, fields in provider_mappings.items(): + transformed_item = {"id": item["itemid"]} + + for target_field, field_config in fields.items(): + source_key = field_config.get("source") + default_value = field_config.get("default") + value = item.get(source_key) if source_key else None + + if target_field == "availability": + value = "in stock" if int(value or 0) > 0 else default_value + elif target_field == "facebook_product_category": + category_id = value or "" + value = f"Food and drink > {category_map.get(category_id, 'General')}" + elif target_field == "image_url": + value = CognitionDataProcessor.resolve_image_link(bot, item["itemid"]) + elif target_field == "price": + value = float(value) + if not value: + value = default_value + + transformed_item[target_field] = value + + data[sync_target].append(transformed_item) + + return data + + @staticmethod + def preprocess_item_toggle_data(bot, json_data, provider): + doc = CatalogProviderMapping.objects(provider=provider).first() + if not doc: + raise Exception(f"Metadata mappings not found for provider={provider}") + + provider_mappings = { + "meta": doc.meta_mappings, + "kv": doc.kv_mappings + } + + in_stock = json_data["body"]["inStock"] + item_ids = json_data["body"]["itemID"] + availability = "in stock" if in_stock else "out of stock" + processed_data = [{"id": item_id, "availability": availability} for item_id in item_ids] + + data = {sync_target: processed_data for sync_target in provider_mappings} + + return data + + + @staticmethod + def 
resolve_image_link(bot: str, item_id: str): + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + + document = CollectionData.objects( + collection_name=catalog_images_collection, + data__item_id=int(item_id), + data__image_type = "local" + ).first() + + if not document: + document = CollectionData.objects( + collection_name=catalog_images_collection, + bot=bot, + data__image_type="global" + ).first() + + if document: + data = document.data or {} + image_link = data.get("image_url") + + if image_link: + return image_link + else: + raise Exception(f"Image URL not found for {item_id} in {catalog_images_collection}") + + async def upsert_data(self, primary_key_col: str, collection_name: str, sync_type: str, data: List[Dict], bot: str, + user: Text): + """ + Upserts data into the CognitionData collection in batches and syncs embeddings with Qdrant. Args: primary_key_col: The primary key column name to check for uniqueness. collection_name: The collection name (table). - event_type: The type of the event being upserted + sync_type: The type of the event being upserted. data: List of rows of data to upsert. bot: The bot identifier associated with the data. - user: The user + user: The user. 
""" from kairon.shared.llm.processor import LLMProcessor @@ -587,7 +820,7 @@ async def upsert_data(self, primary_key_col: str, collection_name: str, event_ty suffix = "_faq_embd" qdrant_collection = f"{bot}_{collection_name}{suffix}" if collection_name else f"{bot}{suffix}" - if await llm_processor.__collection_exists__(qdrant_collection) is False: + if not await llm_processor.__collection_exists__(qdrant_collection): await llm_processor.__create_collection__(qdrant_collection) existing_documents = CognitionData.objects(bot=bot, collection=collection_name).as_pymongo() @@ -596,73 +829,213 @@ async def upsert_data(self, primary_key_col: str, collection_name: str, event_ty doc["data"].get(primary_key_col): doc for doc in existing_documents } - for row in data: - primary_key_value = row.get(primary_key_col) + processed_keys = set() - existing_document = existing_document_map.get(primary_key_value) + update_operations = [] + insert_operations = [] - if event_type == "field_update" and existing_document: - existing_data = existing_document.get("data", {}) - merged_data = {**existing_data, **row} - logger.debug(f"Merged row for {primary_key_col} {primary_key_value}: {merged_data}") - else: - merged_data = row + embedding_payloads = [] + search_payloads = [] + vector_ids = [] - payload = { - "data": merged_data, - "content_type": CognitionDataType.json.value, - "collection": collection_name - } - - if existing_document: - row_id = str(existing_document["_id"]) - self.update_cognition_data(row_id, payload, user, bot) - updated_document = CognitionData.objects(id=row_id).first() - if not isinstance(updated_document, dict): - updated_document = updated_document.to_mongo().to_dict() - logger.info(f"Row with {primary_key_col}: {primary_key_value} updated in MongoDB") - await self.sync_with_qdrant(llm_processor, qdrant_collection, bot, updated_document, user, - primary_key_col) - else: - row_id = self.save_cognition_data(payload, user, bot) - new_document = 
CognitionData.objects(id=row_id).first() - if not isinstance(new_document, dict): - new_document = new_document.to_mongo().to_dict() - logger.info(f"Row with {primary_key_col}: {primary_key_value} inserted in MongoDB") - await self.sync_with_qdrant(llm_processor, qdrant_collection, bot, new_document, user, primary_key_col) + batch_size = 50 + for i in tqdm(range(0, len(data), batch_size), desc="Syncing Knowledge Vault"): + batch_contents = data[i:i + batch_size] - return {"message": "Upsert complete!"} + for row in batch_contents: + primary_key_value = row.get(primary_key_col) + existing_document = existing_document_map.get(primary_key_value) - async def sync_with_qdrant(self, llm_processor, collection_name, bot, document, user, primary_key_col): + vector_id = str(uuid.uuid4()) if not existing_document else existing_document.get("vector_id") + + merged_data = row + if existing_document: + existing_data = existing_document.get("data", {}) + merged_data = {**existing_data, **row} + update_operations.append(UpdateOne( + {"_id": existing_document["_id"]}, + {"$set": {"data": merged_data, "timestamp": datetime.utcnow()}} + )) + else: + new_doc = CognitionData( + data=merged_data, + vector_id=vector_id, + content_type=CognitionDataType.json.value, + collection=collection_name, + bot=bot, + user=user + ) + insert_operations.append(new_doc) + + processed_keys.add(primary_key_value) + + metadata = self.find_matching_metadata(bot, merged_data, collection_name) + search_payload, embedding_payload = Utility.retrieve_search_payload_and_embedding_payload(merged_data, metadata) + + embedding_payloads.append(embedding_payload) + search_payloads.append(search_payload) + vector_ids.append(vector_id) + + if update_operations: + CognitionData._get_collection().bulk_write(update_operations) + logger.info(f"Updated {len(update_operations)} documents in MongoDB") + + if insert_operations: + CognitionData.objects.insert(insert_operations, load_bulk=False) + logger.info(f"Inserted 
{len(insert_operations)} new documents in MongoDB") + + update_operations.clear() + insert_operations.clear() + if embedding_payloads: + embeddings = await llm_processor.get_embedding(embedding_payloads, user, + invocation="knowledge_vault_sync") + points = [{'id': vector_ids[idx], 'vector': embeddings[idx], 'payload': search_payloads[idx]} + for idx in range(len(vector_ids))] + await llm_processor.__collection_upsert__(qdrant_collection, {'points': points}, + err_msg="Unable to upsert data in qdrant! Contact support") + logger.info(f"Upserted {len(points)} points in Qdrant.") + + embedding_payloads.clear() + search_payloads.clear() + vector_ids.clear() + + remaining_primary_keys =[] + if sync_type == VaultSyncType.push_menu.name: + stale_docs = [doc for key, doc in existing_document_map.items() if key not in processed_keys] + + if stale_docs: + doc_ids = [] + vector_ids = [] + remaining_primary_keys = [] + + for doc in stale_docs: + doc_ids.append(doc["_id"]) + vector_ids.append(doc["vector_id"]) + remaining_primary_keys.append(doc["data"].get(primary_key_col)) + + CognitionData.objects(id__in=doc_ids).delete() + logger.info(f"Deleted {len(stale_docs)} stale documents from MongoDB.") + + await llm_processor.__delete_collection_points__(qdrant_collection, vector_ids, "Cannot delete stale points fro Qdrant!") + logger.info(f"Deleted {len(stale_docs)} stale points from Qdrant.") + + return {"message": "Upsert complete!", "stale_ids": remaining_primary_keys} + + @staticmethod + def save_ai_data(processed_data: dict, bot: str, user: str, sync_type: str): """ - Syncs a document with Qdrant vector database by generating embeddings and upserting them. + Save each item in `kv` of the processed payload into CollectionData individually. 
+ """ + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + + kv_items = processed_data.get("kv", []) + incoming_data_map = {item["id"]: item for item in kv_items} + incoming_ids = set(incoming_data_map.keys()) + + existing_docs = CollectionData.objects( + collection_name=catalog_data_collection, + bot=bot, + status=True + ) + existing_data_map = {doc.data.get("id"): doc for doc in existing_docs} + existing_ids = set(existing_data_map.keys()) + + for item_id, item in incoming_data_map.items(): + if item_id in existing_data_map: + doc = existing_data_map[item_id] + if sync_type == SyncType.item_toggle: + for key, value in item.items(): + doc.data[key] = value + else: + doc.data = item + doc.timestamp = datetime.utcnow() + doc.user = user + doc.save() + else: + CollectionData( + collection_name=catalog_data_collection, + data=item, + user=user, + bot=bot, + timestamp=datetime.utcnow(), + status=True + ).save() + stale_ids = [] + if sync_type == SyncType.push_menu: + stale_ids = list(existing_ids - incoming_ids) + if stale_ids: + CollectionData.objects( + collection_name=catalog_data_collection, + bot=bot, + status=True, + data__id__in=stale_ids + ).delete() + + return stale_ids - Args: - llm_processor (LLMProcessor): Instance of LLMProcessor for embedding and Qdrant operations. - collection_name (str): Name of the Qdrant collection. - bot (str): Bot identifier. - document (CognitionData): Document to sync with Qdrant. - user (Text): User performing the operation. + @staticmethod + def load_catalog_provider_mappings(): + """ + Load and store catalog provider mappings from a JSON file. - Raises: - AppException: If Qdrant upsert operation fails. + :param file_path: Path to the mappings JSON file. + :raises AppException: If file does not exist or mapping format is invalid. 
""" - try: - metadata = self.find_matching_metadata(bot, document['data'], document.get('collection')) - search_payload, embedding_payload = Utility.retrieve_search_payload_and_embedding_payload( - document['data'], metadata) - embeddings = await llm_processor.get_embedding(embedding_payload, user, invocation='knowledge_vault_sync') - points = [{'id': document['vector_id'], 'vector': embeddings, 'payload': search_payload}] - await llm_processor.__collection_upsert__(collection_name, {'points': points}, - err_msg="Unable to train FAQ! Contact support") - logger.info(f"Row with {primary_key_col}: {document['data'].get(primary_key_col)} upserted in Qdrant.") - except Exception as e: - raise AppException(f"Failed to sync document with Qdrant: {str(e)}") + file_path = "./metadata/catalog_provider_mappings.json" + path = Path(file_path) + + if not path.exists(): + raise AppException(f"Mappings file not found at {file_path}") + + with open(path, "r") as f: + mapping_data = json.load(f) + + for provider, mappings in mapping_data.items(): + meta = mappings.get("meta") + kv = mappings.get("kv") + + if not meta or not kv: + raise AppException(f"Mappings for provider '{provider}' is missing required 'meta' or 'kv' fields.") + + try: + metadata_doc = CatalogProviderMapping.objects.get(provider=provider) + metadata_doc.update( + set__meta_mappings=meta, + set__kv_mappings=kv + ) + except DoesNotExist: + CatalogProviderMapping( + provider=provider, + meta_mappings=meta, + kv_mappings=kv + ).save() - def _validate_event_type(self, event_type: str): - if event_type not in VaultSyncEventType.__members__.keys(): - raise AppException("Event type does not exist") + @staticmethod + def add_bot_sync_config(request_data, bot: Text, user: Text): + if request_data.provider.lower() == CatalogSyncClass.petpooja: + if BotSyncConfig.objects(branch_bot=bot,provider=CatalogSyncClass.petpooja).first(): + return + + bot_sync_config = BotSyncConfig( + process_push_menu=False, + 
process_item_toggle=False, + parent_bot=bot, + restaurant_name=request_data.config.get("restaurant_name"), + provider=CatalogSyncClass.petpooja, + branch_name=request_data.config.get("branch_name"), + branch_bot=bot, + ai_enabled=False, + meta_enabled=False, + user=user + ) + bot_sync_config.save() - def _validate_collection_exists(self, collection_name: str): - if not CognitionSchema.objects(collection_name=collection_name).first(): - raise AppException(f"Collection '{collection_name}' does not exist.") + @staticmethod + def get_restaurant_and_branch_name(bot: Text): + config = BotSyncConfig.objects(branch_bot=bot).first() + if not config: + raise Exception(f"No bot sync config found for bot: {bot}") + restaurant_name = config.restaurant_name.replace(" ", "_") + branch_name = config.branch_name.replace(" ", "_") + return restaurant_name.lower(), branch_name.lower() diff --git a/kairon/shared/constants.py b/kairon/shared/constants.py index e9a422718..3227164aa 100644 --- a/kairon/shared/constants.py +++ b/kairon/shared/constants.py @@ -83,6 +83,10 @@ class EventClass(str, Enum): content_importer = "content_importer" mail_channel_read_mails = "email_channel_read_mails" agentic_flow = "agentic_flow" + catalog_integration = "catalog_integration" + +class CatalogSyncClass(str, Enum): + petpooja = "petpooja" class EventRequestType(str, Enum): @@ -120,6 +124,9 @@ class ChannelTypes(str, Enum): LINE = "line" MAIL = "mail" +class CatalogProvider(str, Enum): + PETPOOJA = "petpooja" + class ElementTypes(str, Enum): LINK = "link" IMAGE = "image" diff --git a/kairon/shared/data/constant.py b/kairon/shared/data/constant.py index 2f05508c3..80dfbbe7d 100644 --- a/kairon/shared/data/constant.py +++ b/kairon/shared/data/constant.py @@ -111,6 +111,24 @@ class EVENT_STATUS(str, Enum): ABORTED = "Aborted" +class SYNC_STATUS(str, Enum): + INITIATED = "Initiated" + VALIDATING_REQUEST = "Validating request" + VALIDATING_REQUEST_SUCCESS = "Validating request successful" + 
VALIDATING_FAILED = "Validation Failed" + VALIDATING_KNOWLEDGE_VAULT_DATA = "Validating Knowledge vault processed data" + PREPROCESSING = "Preprocessing in progress" + PREPROCESSING_FAILED = "Preprocessing Failed" + PREPROCESSING_COMPLETED = "Preprocessing Completed" + SAVE = "Importing data to kairon" + SAVE_META = "Importing data to Meta" + SYNC_FAILED = "Sync Failed" + ENQUEUED = "Enqueued" + COMPLETED = "Completed" + FAILED = "Failed" + ABORTED = "Aborted" + + class ONBOARDING_STATUS(str, Enum): NOT_COMPLETED = "Not Completed" SKIPPED = "Skipped" @@ -185,6 +203,7 @@ class TOKEN_TYPE(str, Enum): DYNAMIC = "dynamic" CHANNEL = "channel" REFRESH = "refresh" + DATA_INTEGRATION = "data_integration" class ModelTestType(str, Enum): @@ -261,6 +280,9 @@ class FeatureMappings(str, Enum): ONLY_SSO_LOGIN = "only_sso_login" CREATE_USER = "create_user" +class SyncType(str, Enum): + push_menu = "push_menu" + item_toggle = "item_toggle" ORG_SETTINGS_MESSAGES = { "create_user": "User creation is blocked by your OrgAdmin from SSO", diff --git a/kairon/shared/data/data_models.py b/kairon/shared/data/data_models.py index df3a3a178..cf92a89f1 100644 --- a/kairon/shared/data/data_models.py +++ b/kairon/shared/data/data_models.py @@ -1389,4 +1389,13 @@ def validate_name(cls, values): class FlowTagChangeRequest(BaseModel): name: constr(to_lower=True, strip_whitespace=True) tag: str - type: str \ No newline at end of file + type: str + +class MetaConfig(BaseModel): + access_token: str + catalog_id: str + +class POSIntegrationRequest(BaseModel): + provider: str + config: dict + meta_config: Optional[MetaConfig] \ No newline at end of file diff --git a/kairon/shared/data/data_objects.py b/kairon/shared/data/data_objects.py index f9f486f20..af81ef2db 100644 --- a/kairon/shared/data/data_objects.py +++ b/kairon/shared/data/data_objects.py @@ -930,6 +930,7 @@ class BotSettings(Auditlog): cognition_columns_per_collection_limit = IntField(default=5) integrations_per_user_limit = 
IntField(default=3) live_agent_enabled = BooleanField(default=False) + catalog_sync_limit_per_day = IntField(default=5) meta = {"indexes": [{"fields": ["bot", ("bot", "status")]}]} @@ -1086,3 +1087,33 @@ class UserMediaData(Auditlog): meta = {"indexes": [{"fields": ["bot", ("bot", "sender_id"), "media_id"]}]} + + +@auditlogger.log +@push_notification.apply +class POSIntegrations(Auditlog): + bot = StringField(required=True) + provider = StringField(required=True) + config = DictField(required=True) + sync_type = StringField(required=True, default=None) + user = StringField(required=True) + timestamp = DateTimeField(default=datetime.utcnow) + meta_config = DictField() + + meta = {"indexes": [{"fields": ["bot", "provider"]}]} + + +@auditlogger.log +@push_notification.apply +class BotSyncConfig(Auditlog): + process_push_menu = BooleanField(default=False) + process_item_toggle = BooleanField(default=False) + parent_bot = StringField(required=True) + restaurant_name = StringField(required=True) + provider = StringField(required=True) + branch_name = StringField(required=True) + branch_bot = StringField(required=True) + ai_enabled = BooleanField(default=False) + meta_enabled = BooleanField(default=False) + user = StringField(required=True) + timestamp = DateTimeField(default=datetime.utcnow) diff --git a/kairon/shared/data/utils.py b/kairon/shared/data/utils.py index c52e5cd3d..af0cd40d8 100644 --- a/kairon/shared/data/utils.py +++ b/kairon/shared/data/utils.py @@ -397,6 +397,23 @@ def get_channel_endpoint(channel_config: dict): ) return channel_endpoint + @staticmethod + def get_integration_endpoint(integration_config: dict): + from kairon.shared.auth import Authentication + + token, _ = Authentication.generate_integration_token( + integration_config['bot'], integration_config['user'], role=ACCESS_ROLES.DESIGNER.value, + access_limit=[ + f"/api/bot/integration/{integration_config['provider']}/{integration_config['sync_type']}/{integration_config['bot']}/.+"], + 
token_type=TOKEN_TYPE.DATA_INTEGRATION.value + ) + + integration_endpoint = urljoin( + Utility.environment['model']['agent']['url'], + f"/api/bot/integration/{integration_config['provider']}/{integration_config['sync_type']}/{integration_config['bot']}/{token}" + ) + return integration_endpoint + @staticmethod def save_channel_metadata(**kwargs): token = kwargs["token"] diff --git a/kairon/shared/llm/processor.py b/kairon/shared/llm/processor.py index 0ee781ea0..8bb70e058 100644 --- a/kairon/shared/llm/processor.py +++ b/kairon/shared/llm/processor.py @@ -330,6 +330,23 @@ async def __collection_search__(self, collection_name: Text, vector: List, limit timeout=5) return response + async def __delete_collection_points__(self, collection_name: Text, point_ids: List, err_msg: Text, + raise_err=True): + client = AioRestClient() + response = await client.request(http_url=urljoin(self.db_url, f"/collections/{collection_name}/points/delete"), + request_method="POST", + headers=self.headers, + request_body={"points": point_ids}, + return_json=True, + timeout=5) + print(response) + + if not response.get('result'): + if "status" in response: + logging.exception(response['status'].get('error')) + if raise_err: + raise AppException(err_msg) + async def __collection_hybrid_query__(self, collection_name: Text, embeddings: Dict, limit: int, score_threshold: float): client = AioRestClient() request_body = { diff --git a/kairon/shared/models.py b/kairon/shared/models.py index 4c2b558cf..f700751c5 100644 --- a/kairon/shared/models.py +++ b/kairon/shared/models.py @@ -117,9 +117,9 @@ class CognitionMetadataType(str, Enum): int = "int" float = "float" -class VaultSyncEventType(str, Enum): +class VaultSyncType(str, Enum): push_menu = ["column_length_mismatch", "invalid_columns", "pydantic_validation"] - field_update = ["invalid_columns", "document_non_existence", "pydantic_validation"] + item_toggle = ["invalid_columns", "document_non_existence", "pydantic_validation"] class 
GlobalSlotsEntryType(str, Enum): agentic_flow = "agentic_flow" diff --git a/kairon/shared/utils.py b/kairon/shared/utils.py index 614f17294..4fd196d07 100644 --- a/kairon/shared/utils.py +++ b/kairon/shared/utils.py @@ -2529,6 +2529,7 @@ async def format_and_send_mail( "add_trusted_device": MailUtility.__handle_add_trusted_device, "book_a_demo": MailUtility.__handle_book_a_demo, "member_left_bot": MailUtility.__handle_member_left_bot, + "catalog_sync_status": MailUtility.__handle_catalog_sync_status } base_url = kwargs.get("base_url") if not base_url: @@ -2665,6 +2666,22 @@ def __handle_verification_confirmation(**kwargs): subject = Utility.email_conf["email"]["templates"]["confirmed_subject"] return body, subject + @staticmethod + def __handle_catalog_sync_status(**kwargs): + bot = kwargs.get("bot") + executionID = kwargs.get("executionID") + sync_status = kwargs.get("sync_status") + message = kwargs.get("message") + + body = Utility.email_conf["email"]["templates"]["catalog_sync_status"] + + body = body.replace("BOT_ID", bot) + body = body.replace("EXECUTION_ID", executionID) + body = body.replace("SYNC_STATUS", sync_status) + body = body.replace("MESSAGE", message) + subject = Utility.email_conf["email"]["templates"]["catalog_sync_status_subject"] + return body, subject + @staticmethod def __handle_add_member(**kwargs): first_name = kwargs.get("first_name") diff --git a/metadata/catalog_provider_mappings.json b/metadata/catalog_provider_mappings.json new file mode 100644 index 000000000..a2b8c2d81 --- /dev/null +++ b/metadata/catalog_provider_mappings.json @@ -0,0 +1,60 @@ +{ + "petpooja": { + "meta": { + "name": { + "source": "itemname", + "default": "No title" + }, + "description": { + "source": "itemdescription", + "default": "No description available" + }, + "price": { + "source": "price", + "default": 0.0 + }, + "availability": { + "source": "in_stock", + "default": "out of stock" + }, + "image_url": { + "source": "item_image_url", + "default": 
"https://www.kairon.com/default-image.jpg" + }, + "url": { + "source": null, + "default": "https://www.kairon.com/" + }, + "brand": { + "source": null, + "default": "Sattva" + }, + "condition": { + "source": null, + "default": "new" + } + }, + "kv": { + "title": { + "source": "itemname", + "default": "No title" + }, + "description": { + "source": "itemdescription", + "default": "No description available" + }, + "price": { + "source": "price", + "default": 0.0 + }, + "facebook_product_category": { + "source": "item_categoryid", + "default": "Food and drink > General" + }, + "availability": { + "source": "in_stock", + "default": "out of stock" + } + } + } +} \ No newline at end of file diff --git a/tests/integration_test/services_test.py b/tests/integration_test/services_test.py index 9242ace3a..8c5d277d0 100644 --- a/tests/integration_test/services_test.py +++ b/tests/integration_test/services_test.py @@ -1,3 +1,4 @@ +import asyncio import os import re import shutil @@ -27,11 +28,14 @@ from rasa.shared.utils.io import read_config_file from slack_sdk.web.slack_response import SlackResponse +from kairon.meta.processor import MetaProcessor from kairon.shared.account.data_objects import UserActivityLog from kairon.shared.account.data_objects import UserEmailConfirmation from kairon.shared.actions.models import ActionParameterType, DbActionOperationType, DbQueryValueType from kairon.shared.admin.data_objects import LLMSecret from kairon.shared.callback.data_objects import CallbackLog, CallbackRecordStatusType +from kairon.shared.catalog_sync.data_objects import CatalogProviderMapping, CatalogSyncLogs +from kairon.shared.cognition.processor import CognitionDataProcessor from kairon.shared.content_importer.content_processor import ContentImporterLogProcessor from kairon.shared.utils import Utility, MailUtility from kairon.shared.llm.processor import LLMProcessor @@ -48,7 +52,7 @@ from kairon.shared.actions.utils import ActionUtility from kairon.shared.auth import 
Authentication from kairon.shared.cloud.utils import CloudUtility -from kairon.shared.cognition.data_objects import CognitionSchema, CognitionData +from kairon.shared.cognition.data_objects import CognitionSchema, CognitionData, CollectionData from kairon.shared.constants import EventClass, ChannelTypes, KaironSystemSlots from kairon.shared.data.audit.data_objects import AuditLogData from kairon.shared.data.constant import ( @@ -71,7 +75,7 @@ ChatClientConfig, BotSettings, LLMSettings, - DemoRequestLogs, + DemoRequestLogs, BotSyncConfig, POSIntegrations ) from kairon.shared.data.model_processor import ModelProcessor from kairon.shared.data.processor import MongoProcessor @@ -79,6 +83,7 @@ from kairon.shared.metering.constants import MetricType from kairon.shared.models import StoryEventType from kairon.shared.models import User +from pathlib import Path from kairon.shared.multilingual.processor import MultilingualLogProcessor from kairon.shared.multilingual.utils.translator import Translator from kairon.shared.organization.processor import OrgProcessor @@ -122,11 +127,18 @@ def complete_end_to_end_event_execution(bot, user, event_class, **kwargs): from kairon.events.definitions.model_testing import ModelTestingEvent from kairon.events.definitions.history_delete import DeleteHistoryEvent from kairon.events.definitions.content_importer import DocContentImporterEvent + from kairon.events.definitions.catalog_sync import CatalogSync if event_class == EventClass.data_importer: overwrite = kwargs.get('overwrite', True) TrainingDataImporterEvent(bot, user, import_data=True, overwrite=overwrite).execute() + elif event_class == EventClass.catalog_integration: + provider = kwargs.get('provider') + sync_type = kwargs.get('sync_type') + token = kwargs.get('token') + data = kwargs.get('data') + asyncio.run(CatalogSync(bot, user, provider, sync_type=sync_type, token=token).execute(data=data)) elif event_class == EventClass.model_training: ModelTrainingEvent(bot, user).execute() 
elif event_class == EventClass.content_importer: @@ -1930,7 +1942,8 @@ def test_knowledge_vault_sync_push_menu(mock_embedding, mock_collection_exists, mock_collection_upsert.return_value = None embedding = list(np.random.random(LLMProcessor.__embedding__)) - mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}]}) + mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}, {'embedding': embedding}]}) + secrets = [ { @@ -1987,7 +2000,7 @@ def test_knowledge_vault_sync_push_menu(mock_embedding, mock_collection_exists, ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=push_menu", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=push_menu", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) @@ -2011,19 +2024,12 @@ def test_knowledge_vault_sync_push_menu(mock_embedding, mock_collection_exists, expected_calls = [ { - "model": "text-embedding-3-large", - "input": ['{"id":1,"item":"Juice","price":2.5,"quantity":10}'], # First input + "model": "text-embedding-3-small", + "input": ['{"id":1,"item":"Juice","price":2.5,"quantity":10}', '{"id":2,"item":"Apples","price":1.2,"quantity":20}'], "metadata": {'user': 'integration@demo.ai', 'bot': pytest.bot, 'invocation': 'knowledge_vault_sync'}, "api_key": "common_openai_key", "num_retries": 3 }, - { - "model": "text-embedding-3-large", - "input": ['{"id":2,"item":"Apples","price":1.2,"quantity":20}'], # Second input - "metadata": {'user': 'integration@demo.ai', 'bot': pytest.bot, 'invocation': 'knowledge_vault_sync'}, - "api_key": "common_openai_key", - "num_retries": 3 - } ] for i, expected in enumerate(expected_calls): @@ -2041,7 +2047,7 @@ def test_knowledge_vault_sync_push_menu(mock_embedding, mock_collection_exists, @mock.patch.object(LLMProcessor, "__create_collection__", 
autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) -def test_knowledge_vault_sync_field_update(mock_embedding, mock_collection_exists, mock_create_collection, mock_collection_upsert): +def test_knowledge_vault_sync_item_toggle(mock_embedding, mock_collection_exists, mock_create_collection, mock_collection_upsert): bot_settings = BotSettings.objects(bot=pytest.bot).get() bot_settings.content_importer_limit_per_day = 10 bot_settings.cognition_collections_limit = 10 @@ -2053,7 +2059,7 @@ def test_knowledge_vault_sync_field_update(mock_embedding, mock_collection_exist mock_collection_upsert.return_value = None embedding = list(np.random.random(LLMProcessor.__embedding__)) - mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}]}) + mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}, {'embedding': embedding}]}) secrets = [ { @@ -2125,7 +2131,7 @@ def test_knowledge_vault_sync_field_update(mock_embedding, mock_collection_exist ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=field_update", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=item_toggle", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) @@ -2149,19 +2155,12 @@ def test_knowledge_vault_sync_field_update(mock_embedding, mock_collection_exist expected_calls = [ { - "model": "text-embedding-3-large", - "input": ['{"id":1,"item":"Juice","price":80.5,"quantity":56}'], + "model": "text-embedding-3-small", + "input": ['{"id":1,"item":"Juice","price":80.5,"quantity":56}', '{"id":2,"item":"Milk","price":27.0,"quantity":12}'], "metadata": {'user': 'integration@demo.ai', 'bot': pytest.bot, 'invocation': 'knowledge_vault_sync'}, "api_key": "common_openai_key", 
"num_retries": 3 }, - { - "model": "text-embedding-3-large", - "input": ['{"id":2,"item":"Milk","price":27.0,"quantity":12}'], # Second input - "metadata": {'user': 'integration@demo.ai', 'bot': pytest.bot, 'invocation': 'knowledge_vault_sync'}, - "api_key": "common_openai_key", - "num_retries": 3 - } ] for i, expected in enumerate(expected_calls): @@ -2174,7 +2173,7 @@ def test_knowledge_vault_sync_field_update(mock_embedding, mock_collection_exist @pytest.mark.asyncio @responses.activate @mock.patch.object(litellm, "aembedding", autospec=True) -def test_knowledge_vault_sync_event_type_does_not_exist(mock_embedding): +def test_knowledge_vault_sync_sync_type_does_not_exist(mock_embedding): bot_settings = BotSettings.objects(bot=pytest.bot).get() bot_settings.content_importer_limit_per_day = 10 bot_settings.cognition_collections_limit = 10 @@ -2182,7 +2181,7 @@ def test_knowledge_vault_sync_event_type_does_not_exist(mock_embedding): bot_settings.save() embedding = list(np.random.random(LLMProcessor.__embedding__)) - mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}]}) + mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}, {'embedding': embedding}]}) secrets = [ { @@ -2202,14 +2201,14 @@ def test_knowledge_vault_sync_event_type_does_not_exist(mock_embedding): ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=non_existent_event_type", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=non_existent_sync_type", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) actual = response.json() assert not actual["success"] - assert actual["message"] == "Event type does not exist" + assert actual["message"] == "Sync type does not exist" assert actual["error_code"] == 422 cognition_data = CognitionData.objects(bot=pytest.bot, 
collection="nonexistent_collection") @@ -2227,7 +2226,7 @@ def test_knowledge_vault_sync_missing_collection(mock_embedding): bot_settings.save() embedding = list(np.random.random(LLMProcessor.__embedding__)) - mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}]}) + mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}, {'embedding': embedding}]}) secrets = [ { @@ -2247,7 +2246,7 @@ def test_knowledge_vault_sync_missing_collection(mock_embedding): ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=nonexistent_collection&event_type=push_menu", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=nonexistent_collection&sync_type=push_menu", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) @@ -2273,7 +2272,7 @@ def test_knowledge_vault_sync_missing_primary_key(mock_embedding): bot_settings.save() embedding = list(np.random.random(LLMProcessor.__embedding__)) - mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}]}) + mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}, {'embedding': embedding}]}) secrets = [ { @@ -2311,13 +2310,12 @@ def test_knowledge_vault_sync_missing_primary_key(mock_embedding): ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=push_menu", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=push_menu", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) actual = response.json() - print(actual) assert not actual["success"] assert actual["message"] == "Primary key 'id' must exist in each row." 
assert actual["error_code"] == 422 @@ -2340,7 +2338,7 @@ def test_knowledge_vault_sync_column_length_mismatch(mock_embedding): bot_settings.save() embedding = list(np.random.random(LLMProcessor.__embedding__)) - mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}]}) + mock_embedding.return_value = litellm.EmbeddingResponse(**{'data': [{'embedding': embedding}, {'embedding': embedding}]}) secrets = [ { @@ -2378,7 +2376,7 @@ def test_knowledge_vault_sync_column_length_mismatch(mock_embedding): ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=push_menu", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=push_menu", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) @@ -2458,7 +2456,7 @@ def test_knowledge_vault_sync_invalid_columns(mock_embedding): ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=field_update", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=item_toggle", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) @@ -2546,7 +2544,7 @@ def test_knowledge_vault_sync_document_non_existence(mock_embedding): ] response = client.post( - url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&event_type=field_update", + url=f"/api/bot/{pytest.bot}/data/cognition/sync?primary_key_col=id&collection_name=groceries&sync_type=item_toggle", json=sync_data, headers={"Authorization": pytest.token_type + " " + pytest.access_token} ) @@ -3159,6 +3157,1897 @@ def test_upload_doc_content_file_type_validation_failure(): CognitionData.objects(bot=pytest.bot, collection="test_doc_content_file_type_validation_failure").delete() +@responses.activate +def 
test_add_pos_integration_config_success(): + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + CatalogProviderMapping.objects(provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() + + +@responses.activate +def test_add_pos_integration_config_invalid_provider(): + payload = { + "provider": "invalid_provider", + "config": { + "restaurant_name": "invalid", + "branch_name": "invalid", + "restaurant_id": "00000" 
+ }, + "meta_config": { + "access_token": "invalid", + "catalog_id": "000" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json=payload, + headers={"Authorization": f"{pytest.token_type} {pytest.access_token}"} + ) + + actual = response.json() + assert actual["message"] == "Invalid Provider" + assert actual["error_code"] == 422 + assert not actual["success"] + + CatalogProviderMapping.objects(provider="invalid_provider").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="invalid_provider").delete() + POSIntegrations.objects(bot=pytest.bot, provider="invalid_provider", sync_type="push_menu").delete() + + +@responses.activate +def test_add_pos_integration_config_does_not_update_existing_bot_sync_config(): + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "11111" + }, + "meta_config": { + "access_token": "access_token_1", + "catalog_id": "catalog_1" + } + } + + client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json=payload, + headers={"Authorization": f"{pytest.token_type} {pytest.access_token}"} + ) + + updated_payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "updated_restaurant", + "branch_name": "updated_branch", + "restaurant_id": "22222" + }, + "meta_config": { + "access_token": "access_token_2", + "catalog_id": "catalog_2" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json=updated_payload, + headers={"Authorization": f"{pytest.token_type} {pytest.access_token}"} + ) + actual = response.json() + assert actual["success"] + assert actual["error_code"] == 0 + assert actual["message"] == "POS Integration Complete" + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + + bot_sync_config = 
BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() + + +@responses.activate +def test_add_pos_integration_config_invalid_sync_type(): + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant3", + "branch_name": "branch3", + "restaurant_id": "33333" + }, + "meta_config": { + "access_token": "access_333", + "catalog_id": "333" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=invalid_sync", + json=payload, + headers={"Authorization": f"{pytest.token_type} {pytest.access_token}"} + ) + + actual = response.json() + assert actual["message"] == "Sync type does not exist" + assert actual["error_code"] == 422 + assert not actual["success"] + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_push_menu_success(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + 
mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + 
pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu= True + bot_sync_config.process_item_toggle = True + bot_sync_config.ai_enabled = True + bot_sync_config.meta_enabled = True + bot_sync_config.save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." 
+ assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token = token, + provider = "petpooja", data = push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "" + + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "price": 8700.0}, + {"id": "10539699", "price": 3426.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + cognition_map = {doc.data["id"]: doc.data["price"] for doc in cognition_data_docs if + "id" in doc.data and "price" in doc.data} + for item in expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["price"] + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(LLMProcessor, "__delete_collection_points__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) 
+@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_push_menu_success_with_delete_data(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_collection_points, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + mock_delete_collection_points.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}]}) + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name 
== "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_with_delete_data.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." 
+ assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token = token, + provider = "petpooja", data = push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "" + + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + expected_items = [ + {"id": "10539699", "price": 123.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + cognition_map = {doc.data["id"]: doc.data["price"] for doc in cognition_data_docs if + "id" in doc.data and "price" in doc.data} + for item in expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["price"] + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_item_toggle_success(mock_embedding, 
mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_update_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_update_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}]}) + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token": "dummy_access_token", + "catalog_id": "12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=item_toggle", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/item_toggle" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert 
pos_integration.meta_config["access_token"] == "dummy_access_token" + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + item_toggle_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + + with item_toggle_payload_path.open("r", encoding="utf-8") as f: + item_toggle_payload = json.load(f) + + response = client.post( + url=sync_url, + json=item_toggle_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." + assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="item_toggle", token = token, + provider = "petpooja", data = item_toggle_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "availability": doc.data["availability"]} + for doc in catalog_data_docs + ] + expected_items = [ + {"id": "10539699", "availability": "in stock"}, + {"id": "10539580", "availability": "out of stock"}, + ] + + assert all(item in catalog_item_summaries for item in 
expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + cognition_map = {doc.data["id"]: doc.data["availability"] for doc in cognition_data_docs if + "id" in doc.data and "availability" in doc.data} + for item in expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["availability"] + + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_push_menu_process_push_menu_disabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + 
embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert 
pos_integration.meta_config["access_token"] == "dummy_access_token" + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Push menu processing is disabled for this bot" + assert actual["error_code"] == 422 + assert not actual["success"] + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Failed" + assert latest_log.status == "Failure" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Push menu processing is disabled for this bot" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + assert catalog_data_docs.count() == 0 + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() + LLMSecret.objects.delete() + 
CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_item_toggle_process_item_toggle_disabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, update_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + update_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=item_toggle", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert 
actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/item_toggle" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + item_toggle_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + + with item_toggle_payload_path.open("r", encoding="utf-8") as f: + item_toggle_payload = json.load(f) + + response = client.post( + url=sync_url, + json=item_toggle_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Item toggle is disabled for this bot" + assert actual["error_code"] == 422 + assert not actual["success"] + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + 
print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Failed" + assert latest_log.status == "Failure" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Item toggle is disabled for this bot" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + assert catalog_data_docs.count() == 0 + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_push_menu_ai_disabled_meta_disabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value =
False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = 
POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." 
+ assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token=token, + provider="petpooja", data=push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Sync to knowledge vault and Meta is not allowed for this bot. Contact Support!!" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "price": 8700.0}, + {"id": "10539699", "price": 3426.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + 
CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_push_menu_ai_enabled_meta_disabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert 
actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu = True + bot_sync_config.ai_enabled = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + push_menu_payload_path = 
Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." + assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token=token, + provider="petpooja", data=push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Sync to Meta is not allowed for this bot. Contact Support!!" 
+ + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "price": 8700.0}, + {"id": "10539699", "price": 3426.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 3 + + cognition_map = {doc.data["id"]: doc.data["price"] for doc in cognition_data_docs if + "id" in doc.data and "price" in doc.data} + for item in expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["price"] + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", 
autospec=True) +def test_catalog_sync_push_menu_ai_disabled_meta_enabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, 
provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu = True + bot_sync_config.meta_enabled = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." 
+ assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token=token, + provider="petpooja", data=push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Sync to knowledge vault is not allowed for this bot. Contact Support!!" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "price": 8700.0}, + {"id": "10539699", "price": 3426.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + 
CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_push_menu_global_image_not_found(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert 
actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu = True + bot_sync_config.meta_enabled = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Global fallback image URL not found" + assert actual["error_code"] == 422 + assert not actual["success"] + + latest_log = 
CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Failed" + assert latest_log.status == "Failure" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Global fallback image URL not found" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + assert catalog_data_docs.count() == 0 + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, 
"aembedding", autospec=True) +def test_catalog_sync_push_menu_global_local_images_success(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, 
provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu = True + bot_sync_config.ai_enabled = True + bot_sync_config.meta_enabled = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + local_image_data = { + "image_type": "local", + "item_id": 10539634, + "image_url": "https://picsum.photos/id/local/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=local_image_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", 
encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." + assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token=token, + provider="petpooja", data=push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "" + + expected_id_image_map = { + '10539634': 'https://picsum.photos/id/local/237/200/300', + '10539699': 'https://picsum.photos/id/237/200/300', + '10539580': 'https://picsum.photos/id/237/200/300' + } + latest_log_dict = latest_log.to_mongo().to_dict() + meta_items = latest_log_dict["processed_payload"]["meta"] + id_image_map = {item["id"]: item["image_url"] for item in meta_items} + + assert id_image_map == expected_id_image_map + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "price": 8700.0}, + {"id": "10539699", "price": 3426.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = 
CognitionData.objects(bot=str(pytest.bot)) + cognition_map = {doc.data["id"]: doc.data["price"] for doc in cognition_data_docs if + "id" in doc.data and "price" in doc.data} + for item in expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["price"] + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) +@mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_rerun_sync_push_menu_success(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_push_meta_catalog.return_value = None + mock_delete_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", 
+ "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=push_menu", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/push_menu" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event 
triggered successfully!"}, + ) + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + response = client.post( + url=sync_url, + json=push_menu_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + assert actual["message"] == "Push menu processing is disabled for this bot" + assert actual["error_code"] == 422 + assert not actual["success"] + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Failed" + assert latest_log.status == "Failure" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Push menu processing is disabled for this bot" + rerun_execution_id = latest_log.execution_id + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + assert catalog_data_docs.count() == 0 + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_push_menu = True + bot_sync_config.process_item_toggle = True + bot_sync_config.ai_enabled = True + bot_sync_config.meta_enabled = True + bot_sync_config.save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": 
"https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user="integration@demo.ai", + bot=pytest.bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + rerun_sync_url = f"{sync_url}/{rerun_execution_id}" + + response = client.post( + url=rerun_sync_url, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + + actual = response.json() + print(actual) + assert actual["message"] == "Sync in progress! Check logs." + assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="push_menu", token=token, + provider="petpooja", data=push_menu_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "price": doc.data["price"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "price": 8700.0}, + {"id": "10539699", "price": 3426.0}, + {"id": "10539580", "price": 3159.0}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + cognition_map = {doc.data["id"]: doc.data["price"] for doc in cognition_data_docs if + "id" in doc.data and "price" in doc.data} + for item in 
expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["price"] + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + @responses.activate def test_upload_with_bot_content_only_validate_content_data(): bot_settings = BotSettings.objects(bot=pytest.bot).get() diff --git a/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json new file mode 100644 index 000000000..adb06a45c --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json @@ -0,0 +1,14 @@ +{ + "type": "POST", + "body": { + "restID": "njrbv7mu", + "inStock": false, + "itemID": [ + "10539580" + ], + "type": "item", + "autoTurnOnTime": "custom", + "customTurnOnTime": "2025-03-26 12:45:00" + }, + "params": {} +} \ No newline at end of file diff --git a/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_instock.json b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_instock.json new file mode 100644 index 000000000..ee1f8ff03 --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_instock.json @@ -0,0 +1,13 @@ +{ + "type": "POST", + "body": { + "restID": "njrbv7mu", + "itemID": [ + "10539580" + ], + "type": "item", + "autoTurnOnTime": "custom", + "customTurnOnTime": "2025-03-26 12:45:00" + }, + "params": {} +} \ No newline at end of file diff --git 
a/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_itemid.json b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_itemid.json new file mode 100644 index 000000000..021f772bb --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_itemid.json @@ -0,0 +1,11 @@ +{ + "type": "POST", + "body": { + "restID": "njrbv7mu", + "inStock": false, + "type": "item", + "autoTurnOnTime": "custom", + "customTurnOnTime": "2025-03-26 12:45:00" + }, + "params": {} +} \ No newline at end of file diff --git a/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_nonboolean_instock.json b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_nonboolean_instock.json new file mode 100644 index 000000000..5a58000f0 --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_nonboolean_instock.json @@ -0,0 +1,14 @@ +{ + "type": "POST", + "body": { + "restID": "njrbv7mu", + "inStock": "invalid", + "itemID": [ + "10539580" + ], + "type": "item", + "autoTurnOnTime": "custom", + "customTurnOnTime": "2025-03-26 12:45:00" + }, + "params": {} +} \ No newline at end of file diff --git a/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json b/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json new file mode 100644 index 000000000..9fd22b148 --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json @@ -0,0 +1,270 @@ +{ + "success": "1", + "message": "Menu items are successfully listed.", + "restaurants": [ + { + "restaurantid": "4639", + "active": "1", + "details": { + "menusharingcode": "nu0wisf4", + "currency_html": "₹", + "country": "India", + "country_id": "97", + "images": [], + "restaurantname": "Indisubb Foods Private Limited", + "address": "Ahmedabad", + "contact": "1234567890", + "latitude": "1", + "longitude": "1", + "landmark": "", + "city": "Ahmedabad", + 
"city_id": "1321851", + "state": "Gujarat", + "state_id": "1612", + "minimumorderamount": "0", + "minimumdeliverytime": "30 Minutes", + "deliverycharge": "0", + "deliveryhoursfrom1": "", + "deliveryhoursto1": "", + "deliveryhoursfrom2": "", + "deliveryhoursto2": "", + "calculatetaxonpacking": 0, + "calculatetaxondelivery": 0, + "dc_taxes_id": "", + "pc_taxes_id": "", + "packaging_applicable_on": "NONE", + "packaging_charge": "", + "packaging_charge_type": "" + } + } + ], + "ordertypes": [ + { + "ordertypeid": 1, + "ordertype": "Delivery" + }, + { + "ordertypeid": 2, + "ordertype": "Pick Up" + }, + { + "ordertypeid": 3, + "ordertype": "Dine In" + } + ], + "categories": [ + { + "categoryid": "77583", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Add Ons", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77581", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Beverages", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77585", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Chicken", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77578", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Chicken Meal", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77584", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Desserts", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77582", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Hot Coffees", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77579", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Seafood Meal", + "categorytimings": "", + "category_image_url": "" + }, + { + 
"categoryid": "77580", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Side Orders", + "categorytimings": "", + "category_image_url": "" + } + ], + "parentcategories": [], + "items": [ + { + "itemid": "10539634", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "0", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [], + "is_recommend": "0", + "itemname": "Potter 4", + "item_attributeid": "1", + "itemdescription": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "minimumpreparationtime": "", + "price": "8700.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + }, + { + "itemid": "10539699", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "0", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [], + "is_recommend": "0", + "itemname": "Potter 99", + "item_attributeid": "1", + "itemdescription": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. 
This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "minimumpreparationtime": "", + "price": "3426.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + }, + { + "itemid": "10539580", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "1", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [ + { + "addon_group_id": "11425", + "addon_item_selection_min": "1", + "addon_item_selection_max": "1" + } + ], + "is_recommend": "0", + "itemname": "Potter 5", + "item_attributeid": "1", + "itemdescription": "chicken fillet nuggets come with a sauce of your choice (nugget\/garlic sauce). Bite-sized pieces of tender all breast chicken fillets, marinated in our unique & signature blend, breaded and seasoned to perfection, then deep-fried until deliciously tender, crispy with a golden crust", + "minimumpreparationtime": "", + "price": "3159.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + } + ], + "variations": [], + "addongroups": [], + "attributes": [ + { + "attributeid": "1", + "attribute": "veg", + "active": "1" + }, + { + "attributeid": "2", + "attribute": "non-veg", + "active": "1" + } + ], + "taxes": [ + { + "taxid": "2524", + "taxname": "CGST", + "tax": "2.5", + "taxtype": "1", + "tax_ordertype": "", + "active": "1", + "tax_coreortotal": "2", + "tax_taxtype": "1", + "rank": "1", + "consider_in_core_amount": "0", + "description": "" + }, + { + "taxid": "2525", + "taxname": "SGST", + "tax": "2.5", + "taxtype": "1", + "tax_ordertype": "", + "active": "1", + "tax_coreortotal": "2", + "tax_taxtype": "1", + "rank": "2", + "consider_in_core_amount": "0", + "description": "" + } + ], + "discounts": 
[], + "serverdatetime": "2024-08-05 16:40:42", + "db_version": "1.0", + "application_version": "4.0", + "http_code": 200, + "error": "" +} \ No newline at end of file diff --git a/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_invalid.json b/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_invalid.json new file mode 100644 index 000000000..87b073776 --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_invalid.json @@ -0,0 +1,269 @@ +{ + "success": "1", + "message": "Menu items are successfully listed.", + "restaurants": [ + { + "restaurantid": "4639", + "active": "1", + "details": { + "menusharingcode": "nu0wisf4", + "currency_html": "₹", + "country": "India", + "country_id": "97", + "images": [], + "restaurantname": "Indisubb Foods Private Limited", + "address": "Ahmedabad", + "contact": "1234567890", + "latitude": "1", + "longitude": "1", + "landmark": "", + "city": "Ahmedabad", + "city_id": "1321851", + "state": "Gujarat", + "state_id": "1612", + "minimumorderamount": "0", + "minimumdeliverytime": "30 Minutes", + "deliverycharge": "0", + "deliveryhoursfrom1": "", + "deliveryhoursto1": "", + "deliveryhoursfrom2": "", + "deliveryhoursto2": "", + "calculatetaxonpacking": 0, + "calculatetaxondelivery": 0, + "dc_taxes_id": "", + "pc_taxes_id": "", + "packaging_applicable_on": "NONE", + "packaging_charge": "", + "packaging_charge_type": "" + } + } + ], + "ordertypes": [ + { + "ordertypeid": 1, + "ordertype": "Delivery" + }, + { + "ordertypeid": 2, + "ordertype": "Pick Up" + }, + { + "ordertypeid": 3, + "ordertype": "Dine In" + } + ], + "categories": [ + { + "categoryid": "77583", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Add Ons", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77581", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Beverages", + "categorytimings": "", + "category_image_url": "" 
+ }, + { + "categoryid": "77585", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Chicken", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77578", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Chicken Meal", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77584", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Desserts", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77582", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Hot Coffees", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77579", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Seafood Meal", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77580", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Side Orders", + "categorytimings": "", + "category_image_url": "" + } + ], + "parentcategories": [], + "items": [ + { + "itemid": "10539634", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "0", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [], + "is_recommend": "0", + "itemname": "Potter 4", + "item_attributeid": "1", + "itemdescription": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. 
This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "minimumpreparationtime": "", + "price": "8700.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + }, + { + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "0", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [], + "is_recommend": "0", + "itemname": "Potter 99", + "item_attributeid": "1", + "itemdescription": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "minimumpreparationtime": "", + "price": "3426.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + }, + { + "itemid": "10539580", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "1", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [ + { + "addon_group_id": "11425", + "addon_item_selection_min": "1", + "addon_item_selection_max": "1" + } + ], + "is_recommend": "0", + "itemname": "Potter 5", + "item_attributeid": "1", + "itemdescription": "chicken fillet nuggets come with a sauce of your choice (nugget\/garlic sauce). 
Bite-sized pieces of tender all breast chicken fillets, marinated in our unique & signature blend, breaded and seasoned to perfection, then deep-fried until deliciously tender, crispy with a golden crust", + "minimumpreparationtime": "", + "price": "3159.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + } + ], + "variations": [], + "addongroups": [], + "attributes": [ + { + "attributeid": "1", + "attribute": "veg", + "active": "1" + }, + { + "attributeid": "2", + "attribute": "non-veg", + "active": "1" + } + ], + "taxes": [ + { + "taxid": "2524", + "taxname": "CGST", + "tax": "2.5", + "taxtype": "1", + "tax_ordertype": "", + "active": "1", + "tax_coreortotal": "2", + "tax_taxtype": "1", + "rank": "1", + "consider_in_core_amount": "0", + "description": "" + }, + { + "taxid": "2525", + "taxname": "SGST", + "tax": "2.5", + "taxtype": "1", + "tax_ordertype": "", + "active": "1", + "tax_coreortotal": "2", + "tax_taxtype": "1", + "rank": "2", + "consider_in_core_amount": "0", + "description": "" + } + ], + "discounts": [], + "serverdatetime": "2024-08-05 16:40:42", + "db_version": "1.0", + "application_version": "4.0", + "http_code": 200, + "error": "" +} \ No newline at end of file diff --git a/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_with_delete_data.json b/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_with_delete_data.json new file mode 100644 index 000000000..0865c9acd --- /dev/null +++ b/tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_with_delete_data.json @@ -0,0 +1,242 @@ +{ + "success": "1", + "message": "Menu items are successfully listed.", + "restaurants": [ + { + "restaurantid": "4639", + "active": "1", + "details": { + "menusharingcode": "nu0wisf4", + "currency_html": "₹", + "country": "India", + "country_id": "97", + "images": [], + "restaurantname": "Indisubb Foods Private Limited", + "address": "Ahmedabad", + "contact": "1234567890", + "latitude": "1", 
+ "longitude": "1", + "landmark": "", + "city": "Ahmedabad", + "city_id": "1321851", + "state": "Gujarat", + "state_id": "1612", + "minimumorderamount": "0", + "minimumdeliverytime": "30 Minutes", + "deliverycharge": "0", + "deliveryhoursfrom1": "", + "deliveryhoursto1": "", + "deliveryhoursfrom2": "", + "deliveryhoursto2": "", + "calculatetaxonpacking": 0, + "calculatetaxondelivery": 0, + "dc_taxes_id": "", + "pc_taxes_id": "", + "packaging_applicable_on": "NONE", + "packaging_charge": "", + "packaging_charge_type": "" + } + } + ], + "ordertypes": [ + { + "ordertypeid": 1, + "ordertype": "Delivery" + }, + { + "ordertypeid": 2, + "ordertype": "Pick Up" + }, + { + "ordertypeid": 3, + "ordertype": "Dine In" + } + ], + "categories": [ + { + "categoryid": "77583", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Add Ons", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77581", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Beverages", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77585", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Chicken", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77578", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Chicken Meal", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77584", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Desserts", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77582", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Hot Coffees", + "categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77579", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Seafood Meal", + 
"categorytimings": "", + "category_image_url": "" + }, + { + "categoryid": "77580", + "active": "1", + "categoryrank": "1", + "parent_category_id": "0", + "categoryname": "Side Orders", + "categorytimings": "", + "category_image_url": "" + } + ], + "parentcategories": [], + "items": [ + { + "itemid": "10539699", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "0", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [], + "is_recommend": "0", + "itemname": "Potter 99", + "item_attributeid": "1", + "itemdescription": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "minimumpreparationtime": "", + "price": "123.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + }, + { + "itemid": "10539580", + "itemallowvariation": "0", + "itemrank": "1", + "item_categoryid": "77578", + "item_ordertype": "1,2,3", + "item_packingcharges": "0", + "itemallowaddon": "1", + "itemaddonbasedon": "0", + "item_favorite": "0", + "ignore_taxes": "0", + "ignore_discounts": "0", + "in_stock": "2", + "cuisine": [], + "variation_groupname": "", + "variation": [], + "addon": [ + { + "addon_group_id": "11425", + "addon_item_selection_min": "1", + "addon_item_selection_max": "1" + } + ], + "is_recommend": "0", + "itemname": "Potter 5", + "item_attributeid": "1", + "itemdescription": "chicken fillet nuggets come with a sauce of your choice (nugget\/garlic sauce). 
Bite-sized pieces of tender all breast chicken fillets, marinated in our unique & signature blend, breaded and seasoned to perfection, then deep-fried until deliciously tender, crispy with a golden crust", + "minimumpreparationtime": "", + "price": "3159.00", + "active": "1", + "item_image_url": "", + "item_tax": "2524,2525", + "gst_type": "services" + } + ], + "variations": [], + "addongroups": [], + "attributes": [ + { + "attributeid": "1", + "attribute": "veg", + "active": "1" + }, + { + "attributeid": "2", + "attribute": "non-veg", + "active": "1" + } + ], + "taxes": [ + { + "taxid": "2524", + "taxname": "CGST", + "tax": "2.5", + "taxtype": "1", + "tax_ordertype": "", + "active": "1", + "tax_coreortotal": "2", + "tax_taxtype": "1", + "rank": "1", + "consider_in_core_amount": "0", + "description": "" + }, + { + "taxid": "2525", + "taxname": "SGST", + "tax": "2.5", + "taxtype": "1", + "tax_ordertype": "", + "active": "1", + "tax_coreortotal": "2", + "tax_taxtype": "1", + "rank": "2", + "consider_in_core_amount": "0", + "description": "" + } + ], + "discounts": [], + "serverdatetime": "2024-08-05 16:40:42", + "db_version": "1.0", + "application_version": "4.0", + "http_code": 200, + "error": "" +} \ No newline at end of file diff --git a/tests/unit_test/data_processor/catalog_sync_log_processor_test.py b/tests/unit_test/data_processor/catalog_sync_log_processor_test.py new file mode 100644 index 000000000..2c85821ef --- /dev/null +++ b/tests/unit_test/data_processor/catalog_sync_log_processor_test.py @@ -0,0 +1,571 @@ +import json +import os +from datetime import datetime, timedelta +from pathlib import Path + +import pytest +from babel.messages.jslexer import uni_escape_re +from mongoengine import connect + +from kairon import Utility +from kairon.exceptions import AppException +from kairon.shared.catalog_sync.catalog_sync_log_processor import CatalogSyncLogProcessor +from kairon.shared.catalog_sync.data_objects import CatalogSyncLogs +from 
kairon.shared.cognition.data_objects import CognitionSchema, CollectionData +from kairon.shared.cognition.processor import CognitionDataProcessor +from kairon.shared.data.constant import SYNC_STATUS, SyncType +from kairon.shared.data.data_objects import BotSettings, BotSyncConfig + + +class TestCatalogSyncLogProcessor: + + @pytest.fixture(scope='session', autouse=True) + def init(self): + os.environ["system_file"] = "./tests/testing_data/system.yaml" + Utility.load_environment() + connect(**Utility.mongoengine_connection(Utility.environment['database']["url"])) + + + def test_add_log(self): + bot = 'test' + user = 'test' + provider = "petpooja" + sync_type = "push_menu" + CatalogSyncLogProcessor.add_log(bot, user, provider = provider, sync_type = sync_type,raw_payload={"item":"Test raw payload"}) + log = CatalogSyncLogs.objects(bot=bot).get().to_mongo().to_dict() + assert not log.get('exception') + assert log['execution_id'] + assert log['raw_payload'] + assert log['sync_type'] + assert log['start_timestamp'] + assert not log.get('end_timestamp') + assert not log.get('processed_payload') + assert log['sync_status'] == SYNC_STATUS.INITIATED.value + + def test_add_log_exception(self): + bot = 'test' + user = 'test' + CatalogSyncLogProcessor.add_log(bot, user, sync_status=SYNC_STATUS.FAILED.value, + exception="Push menu processing is disabled for this bot", + status="Failure") + log = CatalogSyncLogs.objects(bot=bot).get().to_mongo().to_dict() + assert log.get('exception') == "Push menu processing is disabled for this bot" + assert log['execution_id'] + assert log['raw_payload'] + assert log['sync_type'] + assert log['start_timestamp'] + assert log.get('end_timestamp') + assert log['sync_status'] == SYNC_STATUS.FAILED.value + + + def test_add_log_validation_errors(self): + bot = 'test' + user = 'test' + provider = "petpooja" + sync_type = "push_menu" + CatalogSyncLogProcessor.add_log(bot, user, provider = provider, sync_type = sync_type,raw_payload={"item":"Test raw 
payload"}, + sync_status=SYNC_STATUS.FAILED.value, + exception="Validation Failed", + status="Failure", + validation_errors={ + "Header mismatch": "Expected headers ['order_id', 'order_priority', 'sales', 'profit'] but found ['order_id', 'order_priority', 'revenue', 'sales'].", + "Missing columns": "{'profit'}.", + "Extra columns": "{'revenue'}." + } + ) + log = list(CatalogSyncLogProcessor.get_logs(bot)) + assert log[0].get('exception') == "Validation Failed" + assert log[0]['execution_id'] + assert log[0]['raw_payload'] + assert log[0]['sync_type'] + assert log[0]['start_timestamp'] + assert log[0]["validation_errors"] + assert log[0].get('end_timestamp') + assert log[0]['sync_status'] == SYNC_STATUS.FAILED.value + + def test_add_log_success(self): + bot = 'test' + user = 'test' + provider = "petpooja" + sync_type = "push_menu" + CatalogSyncLogProcessor.add_log(bot, user, provider = provider, sync_type = sync_type,raw_payload={"item":"Test raw payload"}) + CatalogSyncLogProcessor.add_log(bot, user, sync_status=SYNC_STATUS.COMPLETED.value, status="Success") + log = list(CatalogSyncLogProcessor.get_logs(bot)) + assert not log[0].get('exception') + assert log[0]['execution_id'] + assert log[0]['raw_payload'] + assert log[0]['sync_type'] + assert log[0]['start_timestamp'] + assert not log[0]["validation_errors"] + assert log[0]["status"] == 'Success' + assert log[0]['sync_status'] == SYNC_STATUS.COMPLETED.value + + def test_is_event_in_progress_false(self): + bot = 'test' + assert not CatalogSyncLogProcessor.is_sync_in_progress(bot) + + def test_is_event_in_progress_true(self): + bot = 'test' + user = 'test' + provider = "petpooja" + sync_type = "push_menu" + CatalogSyncLogProcessor.add_log(bot, user, provider=provider, sync_type=sync_type, + raw_payload={"item": "Test raw payload"}) + assert CatalogSyncLogProcessor.is_sync_in_progress(bot, False) + + with pytest.raises(Exception): + CatalogSyncLogProcessor.is_sync_in_progress(bot) + + def test_get_logs(self): + bot 
= 'test' + logs = list(CatalogSyncLogProcessor.get_logs(bot)) + assert len(logs) == 4 + + def test_is_limit_exceeded_exception(self, monkeypatch): + bot = 'test' + try: + bot_settings = BotSettings.objects(bot=bot).get() + bot_settings.catalog_sync_limit_per_day = 0 + except: + bot_settings = BotSettings(bot=bot, catalog_sync_limit_per_day=0, user="test") + bot_settings.save() + with pytest.raises(Exception): + assert CatalogSyncLogProcessor.is_limit_exceeded(bot) + + def test_is_limit_exceeded(self, monkeypatch): + bot = 'test' + bot_settings = BotSettings.objects(bot=bot).get() + bot_settings.catalog_sync_limit_per_day = 3 + bot_settings.save() + assert CatalogSyncLogProcessor.is_limit_exceeded(bot, False) + + def test_is_limit_exceeded_false(self, monkeypatch): + bot = 'test' + bot_settings = BotSettings.objects(bot=bot).get() + bot_settings.catalog_sync_limit_per_day = 6 + bot_settings.save() + assert not CatalogSyncLogProcessor.is_limit_exceeded(bot) + + def test_catalog_collection_exists_true(self): + bot = "test" + user = "test" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch A", + branch_bot=bot, + user=user + ).save() + + expected_collection = "test_restaurant_branch_a_catalog" + + CognitionSchema( + bot=bot, + user=user, + collection_name=expected_collection + ).save() + + assert CatalogSyncLogProcessor.is_catalog_collection_exists(bot) is True + + BotSyncConfig.objects.delete() + CognitionSchema.objects.delete() + + def test_catalog_collection_exists_false(self): + bot = "test" + user = "test" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch A", + branch_bot=bot, + user=user + ).save() + + assert CatalogSyncLogProcessor.is_catalog_collection_exists(bot) is False + + BotSyncConfig.objects.delete() + CognitionSchema.objects.delete() + + def test_create_catalog_collection(self): + bot = "test_bot" + user = "test_user" + + 
BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo_provider", + branch_name="Test Branch", + branch_bot=bot, + user=user + ).save() + + BotSettings( + bot=bot, + user=user, + cognition_columns_per_collection_limit=5, + llm_settings={'enable_faq': True} + ).save() + + metadata_id = CatalogSyncLogProcessor.create_catalog_collection(bot, user) + + assert metadata_id is not None + + catalog_name = "test_restaurant_test_branch_catalog" + created_schema = CognitionSchema.objects(collection_name=catalog_name).first() + assert created_schema is not None + assert created_schema.collection_name == catalog_name + + BotSyncConfig.objects.delete() + BotSettings.objects.delete() + CognitionSchema.objects.delete() + + def test_validate_item_ids(self): + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + try: + CatalogSyncLogProcessor.validate_item_ids(push_menu_payload) + itemid_missing = False + except Exception as e: + itemid_missing = True + + assert itemid_missing is False + + def test_validate_item_ids_missing_itemid(self): + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload_invalid.json") + + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + with pytest.raises(Exception): + CatalogSyncLogProcessor.validate_item_ids(push_menu_payload) + + def test_validate_item_toggle_request_valid(self): + file_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + with file_path.open("r", encoding="utf-8") as f: + payload = json.load(f) + + CatalogSyncLogProcessor.validate_item_toggle_request(payload) + + def test_validate_item_toggle_request_missing_instock(self): + file_path = 
Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_instock.json") + with file_path.open("r", encoding="utf-8") as f: + payload = json.load(f) + + with pytest.raises(Exception, match="Missing required field: 'inStock'"): + CatalogSyncLogProcessor.validate_item_toggle_request(payload) + + def test_validate_item_toggle_request_nonboolean_instock(self): + file_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_nonboolean_instock.json") + with file_path.open("r", encoding="utf-8") as f: + payload = json.load(f) + + with pytest.raises(Exception, match="'inStock' must be a boolean"): + CatalogSyncLogProcessor.validate_item_toggle_request(payload) + + def test_validate_item_toggle_request_missing_itemid(self): + file_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload_invalid_missing_itemid.json") + with file_path.open("r", encoding="utf-8") as f: + payload = json.load(f) + + with pytest.raises(Exception, match="Missing required field: 'itemID'"): + CatalogSyncLogProcessor.validate_item_toggle_request(payload) + + def test_sync_type_allowed_valid_push_menu(self): + bot = "test_bot" + user = "test_user" + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + process_push_menu=True, + process_item_toggle=False + ).save() + + CatalogSyncLogProcessor.is_sync_type_allowed(bot, SyncType.push_menu) + + BotSyncConfig.objects.delete() + + def test_sync_type_allowed_valid_item_toggle(self): + bot = "test_bot" + user = "test_user" + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + process_push_menu=False, + process_item_toggle=True + ).save() + + CatalogSyncLogProcessor.is_sync_type_allowed(bot, SyncType.item_toggle) + + BotSyncConfig.objects.delete() + + def test_sync_type_push_menu_not_allowed(self): 
+ bot = "test_bot" + user = "test_user" + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + process_push_menu=False, + process_item_toggle=True + ).save() + + with pytest.raises(Exception, match="Push menu processing is disabled for this bot"): + CatalogSyncLogProcessor.is_sync_type_allowed(bot, SyncType.push_menu) + + BotSyncConfig.objects.delete() + + def test_sync_type_item_toggle_not_allowed(self): + bot = "test_bot" + user = "test_user" + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + process_push_menu=True, + process_item_toggle=False + ).save() + + with pytest.raises(Exception, match="Item toggle is disabled for this bot"): + CatalogSyncLogProcessor.is_sync_type_allowed(bot, SyncType.item_toggle) + + BotSyncConfig.objects.delete() + + def test_sync_type_config_missing(self): + bot = "test_bot" + BotSyncConfig.objects.delete() + + with pytest.raises(Exception, match="No bot sync config found for bot"): + CatalogSyncLogProcessor.is_sync_type_allowed(bot, SyncType.push_menu) + + def test_ai_enabled_true(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + ai_enabled=True + ).save() + + result = CatalogSyncLogProcessor.is_ai_enabled(bot) + assert result is True + + BotSyncConfig.objects.delete() + + def test_ai_enabled_false(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + ai_enabled=False + ).save() + + result = CatalogSyncLogProcessor.is_ai_enabled(bot) + assert result is False + + BotSyncConfig.objects.delete() + + def test_ai_enabled_no_config(self): + bot = "test_bot" + 
BotSyncConfig.objects.delete() + + with pytest.raises(Exception, match="No bot sync config found for bot"): + CatalogSyncLogProcessor.is_ai_enabled(bot) + + def test_meta_enabled_true(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + meta_enabled=True + ).save() + + result = CatalogSyncLogProcessor.is_meta_enabled(bot) + assert result is True + + BotSyncConfig.objects.delete() + + def test_meta_enabled_false(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + meta_enabled=False + ).save() + + result = CatalogSyncLogProcessor.is_meta_enabled(bot) + assert result is False + + BotSyncConfig.objects.delete() + + def test_meta_enabled_no_config(self): + bot = "test_bot" + BotSyncConfig.objects.delete() + + with pytest.raises(Exception, match="No bot sync config found for bot"): + CatalogSyncLogProcessor.is_meta_enabled(bot) + + def test_get_execution_id_for_bot_returns_latest_pending(self): + bot = "test_bot" + user = "test_user" + + CatalogSyncLogs( + execution_id="completed_1", + raw_payload={"item":"Test raw payload"}, + bot=bot, + user=user, + provider="demo", + sync_type="push_menu", + sync_status=SYNC_STATUS.COMPLETED.value, + start_timestamp=datetime.utcnow() - timedelta(minutes=10) + ).save() + + CatalogSyncLogs( + execution_id="failed_1", + raw_payload={"item":"Test raw payload"}, + bot=bot, + user=user, + provider="demo", + sync_type="push_menu", + sync_status=SYNC_STATUS.FAILED.value, + start_timestamp=datetime.utcnow() - timedelta(minutes=5) + ).save() + + CatalogSyncLogs( + execution_id="valid_pending_1", + raw_payload={"item":"Test raw payload"}, + bot=bot, + user=user, + provider="demo", + sync_type="push_menu", + sync_status=SYNC_STATUS.PREPROCESSING.value, + 
start_timestamp=datetime.utcnow() + ).save() + + execution_id = CatalogSyncLogProcessor.get_execution_id_for_bot(bot) + assert execution_id == "valid_pending_1" + + CatalogSyncLogs.objects.delete() + + def test_get_execution_id_for_bot_returns_none_if_all_completed_or_failed(self): + bot = "test_bot" + user = "test_user" + + CatalogSyncLogs( + execution_id="completed_2", + raw_payload={"item":"Test raw payload"}, + bot=bot, + user=user, + provider="demo", + sync_type="push_menu", + sync_status=SYNC_STATUS.COMPLETED.value, + start_timestamp=datetime.utcnow() + ).save() + + CatalogSyncLogs( + execution_id="failed_2", + raw_payload={"item":"Test raw payload"}, + bot=bot, + user=user, + provider="demo", + sync_type="push_menu", + sync_status=SYNC_STATUS.FAILED.value, + start_timestamp=datetime.utcnow() + ).save() + + execution_id = CatalogSyncLogProcessor.get_execution_id_for_bot(bot) + assert execution_id is None + + CatalogSyncLogs.objects.delete() + + def test_validate_image_configurations_when_catalog_images_exists(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo_provider", + branch_name="Test Branch", + branch_bot=bot, + user=user + ).save() + + CollectionData( + collection_name="test_restaurant_test_branch_catalog_images", + data={ + "image_type": "global", + "image_url": "http://example.com/global_fallback.jpg", + "image_base64": "" + }, + user=user, + bot=bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + CatalogSyncLogProcessor.validate_image_configurations(bot, user) + + BotSyncConfig.objects.delete() + CollectionData.objects.delete() + + def test_validate_image_configurations_when_catalog_images_missing_global_fallback(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo_provider", + branch_name="Test Branch", + branch_bot=bot, + user=user + ).save() + + with 
pytest.raises(Exception, match="Global fallback image URL not found"): + CatalogSyncLogProcessor.validate_image_configurations(bot, user) + + BotSyncConfig.objects.delete() \ No newline at end of file diff --git a/tests/unit_test/data_processor/data_processor_test.py b/tests/unit_test/data_processor/data_processor_test.py index 5d13dee14..f91b9d0b7 100644 --- a/tests/unit_test/data_processor/data_processor_test.py +++ b/tests/unit_test/data_processor/data_processor_test.py @@ -7,13 +7,16 @@ import urllib from datetime import datetime, timedelta, timezone from io import BytesIO +from pathlib import Path from typing import List from urllib.parse import urljoin import ujson as json import yaml +from kairon.shared.catalog_sync.data_objects import CatalogProviderMapping from kairon.shared.content_importer.data_objects import ContentValidationLogs +from kairon.shared.data.data_models import POSIntegrationRequest from kairon.shared.rest_client import AioRestClient from kairon.shared.utils import Utility from kairon.shared.llm.processor import LLMProcessor @@ -65,7 +68,7 @@ from kairon.shared.admin.data_objects import LLMSecret from kairon.shared.auth import Authentication from kairon.shared.chat.data_objects import Channels -from kairon.shared.cognition.data_objects import CognitionData, CognitionSchema, ColumnMetadata +from kairon.shared.cognition.data_objects import CognitionData, CognitionSchema, ColumnMetadata, CollectionData from kairon.shared.cognition.processor import CognitionDataProcessor from kairon.shared.constants import SLOT_SET_TYPE, EventClass from kairon.shared.data.audit.data_objects import AuditLogData @@ -84,7 +87,7 @@ Utterances, BotSettings, ChatClientConfig, LookupTables, Forms, SlotMapping, KeyVault, MultiflowStories, LLMSettings, MultiflowStoryEvents, Synonyms, - Lookup + Lookup, BotSyncConfig ) from kairon.shared.data.history_log_processor import HistoryDeletionLogProcessor from kairon.shared.data.model_processor import ModelProcessor @@ -94,7 
+97,7 @@ from kairon.shared.live_agent.live_agent import LiveAgentHandler from kairon.shared.metering.constants import MetricType from kairon.shared.metering.data_object import Metering -from kairon.shared.models import StoryEventType, HttpContentType, CognitionDataType, VaultSyncEventType +from kairon.shared.models import StoryEventType, HttpContentType, CognitionDataType, VaultSyncType from kairon.shared.multilingual.processor import MultilingualLogProcessor from kairon.shared.test.data_objects import ModelTestingLogs from kairon.shared.test.processor import ModelTestingLogProcessor @@ -1090,6 +1093,428 @@ def test_delete_prompt_action_not_present(self): with pytest.raises(AppException, match=f'Action with name "non_existent_kairon_faq_action" not found'): processor.delete_action('non_existent_kairon_faq_action', bot, user) + def test_preprocess_push_menu_data_success(self): + bot = "test_bot" + user = "test_user" + provider = "petpooja" + + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + CatalogProviderMapping( + provider=provider, + meta_mappings={ + "name": {"source": "itemname", "default": "No title"}, + "description": {"source": "itemdescription", "default": "No description available"}, + "price": {"source": "price", "default": 0.0}, + "availability": {"source": "in_stock", "default": "out of stock"}, + "image_url": {"source": "item_image_url", "default": "https://www.kairon.com/default-image.jpg"}, + "url": {"source": None, "default": "https://www.kairon.com/"}, + "brand": {"source": None, "default": "Sattva"}, + "condition": {"source": None, "default": "new"} + }, + kv_mappings={ + "title": {"source": "itemname", "default": "No title"}, + "description": {"source": "itemdescription", "default": "No description available"}, + "price": {"source": "price", "default": 0.0}, + "facebook_product_category": 
{"source": "item_categoryid", "default": "Food and drink > General"}, + "availability": {"source": "in_stock", "default": "out of stock"} + } + ).save() + + BotSyncConfig( + parent_bot=bot, + restaurant_name="Test Restaurant", + provider="demo", + branch_name="Branch", + branch_bot=bot, + user=user, + process_push_menu=False, + process_item_toggle=True + ).save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "https://picsum.photos/id/237/200/300", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user=user, + bot=bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + result = CognitionDataProcessor.preprocess_push_menu_data(bot, push_menu_payload, provider) + + expected_result = { + "meta": [ + { + "id": "10539634", + "name": "Potter 4", + "description": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "price": 8700, + "availability": "in stock", + "image_url": "https://picsum.photos/id/237/200/300", + "url": "https://www.kairon.com/", + "brand": "Sattva", + "condition": "new" + }, + { + "id": "10539699", + "name": "Potter 99", + "description": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "price": 3426, + "availability": "in stock", + "image_url": "https://picsum.photos/id/237/200/300", + "url": "https://www.kairon.com/", + "brand": "Sattva", + "condition": "new" + }, + { + "id": "10539580", + "name": "Potter 5", + "description": "chicken fillet nuggets come with a sauce of your choice (nugget/garlic sauce). 
Bite-sized pieces of tender all breast chicken fillets, marinated in our unique & signature blend, breaded and seasoned to perfection, then deep-fried until deliciously tender, crispy with a golden crust", + "price": 3159, + "availability": "in stock", + "image_url": "https://picsum.photos/id/237/200/300", + "url": "https://www.kairon.com/", + "brand": "Sattva", + "condition": "new" + } + ], + "kv": [ + { + "id": "10539634", + "title": "Potter 4", + "description": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "price": 8700, + "facebook_product_category": "Food and drink > Chicken Meal", + "availability": "in stock" + }, + { + "id": "10539699", + "title": "Potter 99", + "description": "Chicken fillet in a bun with coleslaw,lettuce, pickles and our spicy cocktail sauce. This sandwich is made with care to make sure that each and every bite is packed with Mmmm", + "price": 3426, + "facebook_product_category": "Food and drink > Chicken Meal", + "availability": "in stock" + }, + { + "id": "10539580", + "title": "Potter 5", + "description": "chicken fillet nuggets come with a sauce of your choice (nugget/garlic sauce). 
Bite-sized pieces of tender all breast chicken fillets, marinated in our unique & signature blend, breaded and seasoned to perfection, then deep-fried until deliciously tender, crispy with a golden crust", + "price": 3159, + "facebook_product_category": "Food and drink > Chicken Meal", + "availability": "in stock" + } + ] + } + + assert result == expected_result + CatalogProviderMapping.objects.delete() + BotSyncConfig.objects.delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + + def test_preprocess_push_menu_data_no_provider_mapping(self): + bot = "test_bot" + provider = "nonexistent_provider" + push_menu_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_push_menu_payload.json") + with push_menu_payload_path.open("r", encoding="utf-8") as f: + push_menu_payload = json.load(f) + + with pytest.raises(Exception, match="Metadata mappings not found for provider=nonexistent_provider"): + CognitionDataProcessor.preprocess_push_menu_data(bot, push_menu_payload, provider) + + def test_preprocess_item_toggle_data_success(self): + bot = "test_bot" + provider = "petpooja" + + CatalogProviderMapping( + provider=provider, + meta_mappings={ + "name": {"source": "itemname", "default": "No title"}, + "description": {"source": "itemdescription", "default": "No description available"}, + "price": {"source": "price", "default": 0.0}, + "availability": {"source": "in_stock", "default": "out of stock"}, + "image_url": {"source": "item_image_url", "default": "https://www.kairon.com/default-image.jpg"}, + "url": {"source": None, "default": "https://www.kairon.com/"}, + "brand": {"source": None, "default": "Sattva"}, + "condition": {"source": None, "default": "new"} + }, + kv_mappings={ + "title": {"source": "itemname", "default": "No title"}, + "description": {"source": "itemdescription", "default": "No description available"}, + "price": {"source": "price", "default": 0.0}, + "facebook_product_category": {"source": "item_categoryid", 
"default": "Food and drink > General"}, + "availability": {"source": "in_stock", "default": "out of stock"} + } + ).save() + + json_data_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + with json_data_path.open("r", encoding="utf-8") as f: + json_data = json.load(f) + + result = CognitionDataProcessor.preprocess_item_toggle_data(bot, json_data, provider) + + expected_result = { + "meta": [ + {"id": "10539580", "availability": "out of stock"} + ], + "kv": [ + {"id": "10539580", "availability": "out of stock"} + ] + } + + assert result == expected_result + CatalogProviderMapping.objects.delete() + + def test_preprocess_item_toggle_data_no_provider_mapping(self): + bot = "test_bot" + provider = "nonexistent_provider" + json_data_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + with json_data_path.open("r", encoding="utf-8") as f: + json_data = json.load(f) + + with pytest.raises(Exception, match="Metadata mappings not found for provider=nonexistent_provider"): + CognitionDataProcessor.preprocess_item_toggle_data(bot, json_data, provider) + + def test_resolve_image_link_global(self): + bot = "test_bot" + user = "test_user" + item_id = "12345" + + bot_sync_config = BotSyncConfig( + parent_bot="parent_bot", + restaurant_name="TestRestaurant", + provider="some_provider", + branch_name="TestBranch", + branch_bot=bot, + ai_enabled=True, + meta_enabled=True, + user=user, + timestamp=datetime.utcnow() + ) + bot_sync_config.save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "http://global_image_url.com", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user=user, + bot=bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + result = 
CognitionDataProcessor.resolve_image_link(bot, item_id) + + expected_result = "http://global_image_url.com" + assert result == expected_result + + BotSyncConfig.objects.delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + + def test_resolve_image_link_local(self): + bot = "test_bot" + user = "test_user" + item_id = "12345" + + bot_sync_config = BotSyncConfig( + parent_bot="parent_bot", + restaurant_name="TestRestaurant", + provider="some_provider", + branch_name="TestBranch", + branch_bot=bot, + ai_enabled=True, + meta_enabled=True, + user=user, + timestamp=datetime.utcnow() + ) + bot_sync_config.save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + fallback_data = { + "image_type": "global", + "image_url": "http://global_image_url.com", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=fallback_data, + user=user, + bot=bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + local_image_data = { + "image_type": "local", + "item_id": int(item_id), + "image_url": "http://local_image_url.com", + "image_base64": "" + } + CollectionData( + collection_name=catalog_images_collection, + data=local_image_data, + user=user, + bot=bot, + status=True, + timestamp=datetime.utcnow() + ).save() + + result = CognitionDataProcessor.resolve_image_link(bot, item_id) + + expected_result = "http://local_image_url.com" + assert result == expected_result + + BotSyncConfig.objects.delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + + def test_resolve_image_link_no_image(self): + bot = "test_bot" + user = "test_user" + item_id = "12345" + + bot_sync_config = BotSyncConfig( + parent_bot="parent_bot", + restaurant_name="TestRestaurant", + provider="some_provider", + branch_name="TestBranch", + branch_bot=bot, + ai_enabled=True, + 
meta_enabled=True, + user=user, + timestamp=datetime.utcnow() + ) + bot_sync_config.save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + catalog_images_collection = f"{restaurant_name}_{branch_name}_catalog_images" + + with pytest.raises(Exception, + match=f"Image URL not found for {item_id} in {catalog_images_collection}"): + CognitionDataProcessor.resolve_image_link(bot, item_id) + + BotSyncConfig.objects.delete() + CollectionData.objects(collection_name=catalog_images_collection).delete() + + def test_add_bot_sync_config_success(self): + bot = "test_bot" + user = "test_user" + request_data = POSIntegrationRequest( + provider="petpooja", + config={ + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + meta_config={ + "access_token": "dummy_access_token", + "catalog_id": "12345" + } + ) + + BotSyncConfig.objects(branch_bot=bot, provider="petpooja").delete() + + CognitionDataProcessor.add_bot_sync_config(request_data, bot, user) + + bot_sync = BotSyncConfig.objects.get(branch_bot=bot, provider="petpooja") + assert bot_sync.restaurant_name == "restaurant1" + assert bot_sync.branch_name == "branch1" + assert bot_sync.process_push_menu is False + assert bot_sync.process_item_toggle is False + assert bot_sync.ai_enabled is False + assert bot_sync.meta_enabled is False + assert bot_sync.user == user + assert bot_sync.parent_bot == bot + + BotSyncConfig.objects(branch_bot=bot, provider="petpooja").delete() + + def test_add_bot_sync_config_already_exists(self): + bot = "test_bot" + user = "test_user" + + request_data = POSIntegrationRequest( + provider="petpooja", + config={ + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + meta_config={ + "access_token": "dummy_access_token", + "catalog_id": "12345" + } + ) + + existing_config = BotSyncConfig( + process_push_menu=True, + process_item_toggle=True, + parent_bot=bot, + 
restaurant_name="restaurant1", + provider="petpooja", + branch_name="branch1", + branch_bot=bot, + ai_enabled=True, + meta_enabled=True, + user=user + ) + existing_config.save() + + CognitionDataProcessor.add_bot_sync_config(request_data, bot, user) + + configs = BotSyncConfig.objects(branch_bot=bot, provider="petpooja") + assert configs.count() == 1 + + config = configs.first() + assert config.process_push_menu is True + assert config.ai_enabled is True + + BotSyncConfig.objects(branch_bot=bot, provider="petpooja").delete() + + def test_get_restaurant_and_branch_name_success(self): + bot = "test_bot" + user = "test_user" + + BotSyncConfig( + parent_bot="parent_bot", + restaurant_name="My Test Restaurant", + provider="test_provider", + branch_name="Main Branch", + branch_bot=bot, + ai_enabled=True, + meta_enabled=False, + user=user, + timestamp=datetime.utcnow() + ).save() + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(bot) + + assert restaurant_name == "my_test_restaurant" + assert branch_name == "main_branch" + + BotSyncConfig.objects(branch_bot=bot).delete() + + def test_get_restaurant_and_branch_name_no_config(self): + bot = "bot_without_config" + BotSyncConfig.objects(branch_bot=bot).delete() + + with pytest.raises(Exception, match=f"No bot sync config found for bot: {bot}"): + CognitionDataProcessor.get_restaurant_and_branch_name(bot) + def test_get_live_agent(self): processor = MongoProcessor() bot = 'test_bot' @@ -2256,14 +2681,14 @@ def test_get_pydantic_type_invalid(self): def test_validate_event_type_valid(self): processor = CognitionDataProcessor() - valid_event_type = list(VaultSyncEventType.__members__.keys())[0] - processor._validate_event_type(valid_event_type) + valid_event_type = list(VaultSyncType.__members__.keys())[0] + processor._validate_sync_type(valid_event_type) def test_validate_event_type_invalid(self): processor = CognitionDataProcessor() invalid_event_type = "invalid_event" with 
pytest.raises(AppException, match="Event type does not exist"): - processor._validate_event_type(invalid_event_type) + processor._validate_sync_type(invalid_event_type) def test_validate_collection_exists_valid(self): bot = 'test_bot' From ebe1dd1acf2153de65c74ba0d638ae7fa7f9f6ee Mon Sep 17 00:00:00 2001 From: himanshu_gupta Date: Thu, 24 Apr 2025 08:42:16 +0530 Subject: [PATCH 2/5] Force commit catalog sync status html file --- template/emails/catalog_sync_status.html | 278 +++++++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 template/emails/catalog_sync_status.html diff --git a/template/emails/catalog_sync_status.html b/template/emails/catalog_sync_status.html new file mode 100644 index 000000000..f655e215f --- /dev/null +++ b/template/emails/catalog_sync_status.html @@ -0,0 +1,278 @@ + + + + + + + + + + + + + + + +
+ Here is the latest status update for your kAIron catalog sync. +
+ + + + + + + + + + + + + + + + +
+ + + + +
+ Kairon +
+
+ + + + +
+ + + + + + + + + + + + + +
+ Catalog Sync Status +
+

+ Catalog Sync Status Update +

+
+

Hi,

+

+ Bot ID: BOT_ID
+ Execution ID: EXECUTION_ID

+ Sync Status: SYNC_STATUS

+ Message: MESSAGE

+

+
+

Thanks,

+

The team behind kAIron!​

+
+
+
+ This email was sent to + USER_EMAIL +
+ + + + + + + + + + +
+ Digite +
+ + + + +
+ + + + +
+ + Facebook +
+ + + + +
+ + Twitter +
+ + + + +
+ LinkedIn +
+ + + + +
+ + YouTube + +
+
+
+

Digité, Inc. 21060 Homestead Rd, Suite 220 Cupertino, CA, 95014, US

+
+
+ + + \ No newline at end of file From e43c096ef2821f19fd23c9e219cf95a03915e9e3 Mon Sep 17 00:00:00 2001 From: himanshu_gupta Date: Mon, 28 Apr 2025 12:48:46 +0530 Subject: [PATCH 3/5] Added and updated test cases Removed unused imports --- kairon/api/app/routers/bot/data.py | 4 +- kairon/api/app/routers/bot/integrations.py | 1 - kairon/events/definitions/petpooja_sync.py | 9 +- kairon/meta/processor.py | 3 - .../catalog_sync_log_processor.py | 9 +- kairon/shared/catalog_sync/data_objects.py | 2 +- kairon/shared/cognition/processor.py | 27 - tests/integration_test/services_test.py | 541 +++++++++++++++++- tests/unit_test/action/action_test.py | 6 +- .../data_processor/data_processor_test.py | 66 +-- 10 files changed, 556 insertions(+), 112 deletions(-) diff --git a/kairon/api/app/routers/bot/data.py b/kairon/api/app/routers/bot/data.py index 70d7b3eaa..8027bafd9 100644 --- a/kairon/api/app/routers/bot/data.py +++ b/kairon/api/app/routers/bot/data.py @@ -1,7 +1,7 @@ import os -from typing import List, Text +from typing import List -from fastapi import UploadFile, File, Security, APIRouter, Query, HTTPException, Path +from fastapi import UploadFile, File, Security, APIRouter, Query, HTTPException from starlette.requests import Request from starlette.responses import FileResponse diff --git a/kairon/api/app/routers/bot/integrations.py b/kairon/api/app/routers/bot/integrations.py index 5b91652e0..56d69d0a4 100644 --- a/kairon/api/app/routers/bot/integrations.py +++ b/kairon/api/app/routers/bot/integrations.py @@ -12,7 +12,6 @@ from kairon.shared.constants import CatalogProvider from kairon.shared.constants import DESIGNER_ACCESS from kairon.shared.models import User -from kairon.shared.utils import MailUtility router = APIRouter() cognition_processor = CognitionDataProcessor() diff --git a/kairon/events/definitions/petpooja_sync.py b/kairon/events/definitions/petpooja_sync.py index 9b885c0b0..773333d7a 100644 --- a/kairon/events/definitions/petpooja_sync.py +++ 
b/kairon/events/definitions/petpooja_sync.py @@ -1,17 +1,10 @@ from typing import Text -from dotenv import set_key - -from kairon import Utility -from loguru import logger - from kairon.catalog_sync.definitions.base import CatalogSyncBase -from kairon.exceptions import AppException from kairon.meta.processor import MetaProcessor from kairon.shared.cognition.processor import CognitionDataProcessor -from kairon.shared.constants import EventClass from kairon.shared.data.constant import SyncType, SYNC_STATUS -from kairon.shared.data.data_objects import POSIntegrations, BotSyncConfig +from kairon.shared.data.data_objects import POSIntegrations from kairon.shared.catalog_sync.catalog_sync_log_processor import CatalogSyncLogProcessor from kairon.shared.utils import MailUtility diff --git a/kairon/meta/processor.py b/kairon/meta/processor.py index 7073e790d..512dff17e 100644 --- a/kairon/meta/processor.py +++ b/kairon/meta/processor.py @@ -1,12 +1,9 @@ import asyncio import json from typing import Text, List -from urllib.parse import urljoin from loguru import logger import requests -from kairon import Utility from kairon.shared.catalog_sync.data_objects import CatalogProviderMapping -from kairon.shared.rest_client import AioRestClient from urllib.parse import quote diff --git a/kairon/shared/catalog_sync/catalog_sync_log_processor.py b/kairon/shared/catalog_sync/catalog_sync_log_processor.py index 242742765..3de5d66b2 100644 --- a/kairon/shared/catalog_sync/catalog_sync_log_processor.py +++ b/kairon/shared/catalog_sync/catalog_sync_log_processor.py @@ -1,18 +1,14 @@ -import json from datetime import datetime -from typing import List from bson import ObjectId from loguru import logger from mongoengine import Q, DoesNotExist -from kairon.shared.cognition.data_objects import CognitionSchema, ColumnMetadata, CollectionData +from kairon.shared.cognition.data_objects import CognitionSchema, CollectionData from kairon.shared.cognition.processor import CognitionDataProcessor 
-from kairon.shared.content_importer.data_objects import ContentValidationLogs from kairon.shared.data.constant import SYNC_STATUS, SyncType from kairon.shared.data.data_models import CognitionSchemaRequest from kairon.shared.data.data_objects import BotSettings, BotSyncConfig -from kairon.shared.data.processor import MongoProcessor from kairon.shared.catalog_sync.data_objects import CatalogSyncLogs, CatalogProviderMapping from kairon.shared.models import CognitionMetadataType @@ -307,8 +303,7 @@ def validate_image_configurations(bot: str, user: str): f"Global fallback image document not found in `{catalog_images_collection}`") if not document.data.get("image_url"): - raise Exception( - f"Global fallback image URL not found") + raise Exception("Global fallback image URL not found") @staticmethod def get_execution_id_for_bot(bot: str): diff --git a/kairon/shared/catalog_sync/data_objects.py b/kairon/shared/catalog_sync/data_objects.py index 30559532e..f204b62d6 100644 --- a/kairon/shared/catalog_sync/data_objects.py +++ b/kairon/shared/catalog_sync/data_objects.py @@ -1,6 +1,6 @@ from datetime import datetime -from mongoengine import StringField, BooleanField, DateTimeField, DynamicDocument, DictField, ListField +from mongoengine import StringField, DateTimeField, DynamicDocument, DictField from kairon.shared.data.signals import push_notification, auditlogger diff --git a/kairon/shared/cognition/processor.py b/kairon/shared/cognition/processor.py index 75db7faf9..66ef8cd52 100644 --- a/kairon/shared/cognition/processor.py +++ b/kairon/shared/cognition/processor.py @@ -640,33 +640,6 @@ def validate_data(self, primary_key_col: str, collection_name: str, sync_type: s # await self.sync_with_qdrant(llm_processor, qdrant_collection, bot, new_document, user, primary_key_col) # # return {"message": "Upsert complete!"} - - async def sync_with_qdrant(self, llm_processor, collection_name, bot, document, user, primary_key_col): - """ - Syncs a document with Qdrant vector 
database by generating embeddings and upserting them. - - Args: - llm_processor (LLMProcessor): Instance of LLMProcessor for embedding and Qdrant operations. - collection_name (str): Name of the Qdrant collection. - bot (str): Bot identifier. - document (CognitionData): Document to sync with Qdrant. - user (Text): User performing the operation. - - Raises: - AppException: If Qdrant upsert operation fails. - """ - try: - metadata = self.find_matching_metadata(bot, document['data'], document.get('collection')) - search_payload, embedding_payload = Utility.retrieve_search_payload_and_embedding_payload( - document['data'], metadata) - embeddings = await llm_processor.get_embedding(embedding_payload, user, invocation='knowledge_vault_sync') - points = [{'id': document['vector_id'], 'vector': embeddings, 'payload': search_payload}] - await llm_processor.__collection_upsert__(collection_name, {'points': points}, - err_msg="Unable to train FAQ! Contact support") - logger.info(f"Row with {primary_key_col}: {document['data'].get(primary_key_col)} upserted in Qdrant.") - except Exception as e: - raise AppException(f"Failed to sync document with Qdrant: {str(e)}") - def _validate_sync_type(self, sync_type: str): if sync_type not in VaultSyncType.__members__.keys(): raise AppException("Sync type does not exist") diff --git a/tests/integration_test/services_test.py b/tests/integration_test/services_test.py index 8c5d277d0..b968843dd 100644 --- a/tests/integration_test/services_test.py +++ b/tests/integration_test/services_test.py @@ -8,7 +8,7 @@ from datetime import datetime, timedelta from io import BytesIO from unittest import mock -from unittest.mock import patch +from unittest.mock import patch, AsyncMock from urllib.parse import urljoin from zipfile import ZipFile import litellm @@ -2024,7 +2024,7 @@ def test_knowledge_vault_sync_push_menu(mock_embedding, mock_collection_exists, expected_calls = [ { - "model": "text-embedding-3-small", + "model": "text-embedding-3-large", 
"input": ['{"id":1,"item":"Juice","price":2.5,"quantity":10}', '{"id":2,"item":"Apples","price":1.2,"quantity":20}'], "metadata": {'user': 'integration@demo.ai', 'bot': pytest.bot, 'invocation': 'knowledge_vault_sync'}, "api_key": "common_openai_key", @@ -2155,7 +2155,7 @@ def test_knowledge_vault_sync_item_toggle(mock_embedding, mock_collection_exists expected_calls = [ { - "model": "text-embedding-3-small", + "model": "text-embedding-3-large", "input": ['{"id":1,"item":"Juice","price":80.5,"quantity":56}', '{"id":2,"item":"Milk","price":27.0,"quantity":12}'], "metadata": {'user': 'integration@demo.ai', 'bot': pytest.bot, 'invocation': 'knowledge_vault_sync'}, "api_key": "common_openai_key", @@ -3327,14 +3327,16 @@ def test_add_pos_integration_config_invalid_sync_type(): @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_success(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -3491,14 +3493,17 @@ def test_catalog_sync_push_menu_success(mock_embedding, mock_collection_exists, @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) 
@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) @mock.patch.object(LLMProcessor, "__delete_collection_points__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_success_with_delete_data(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_collection_points, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_collection_points, + mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None mock_delete_collection_points.return_value = None @@ -3618,13 +3623,15 @@ def test_catalog_sync_push_menu_success_with_delete_data(mock_embedding, mock_co @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_item_toggle_success(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_update_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_update_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + 
mock_format_and_send_mail.return_value = None mock_update_meta_catalog.return_value = None embedding = list(np.random.random(LLMProcessor.__embedding__)) @@ -3741,7 +3748,8 @@ def test_catalog_sync_item_toggle_success(mock_embedding, mock_collection_exists CatalogProviderMapping.objects(provider="petpooja").delete() BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() - POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").delete() LLMSecret.objects.delete() CollectionData.objects(collection_name=catalog_data_collection).delete() CollectionData.objects(collection_name=catalog_images_collection).delete() @@ -3754,14 +3762,16 @@ def test_catalog_sync_item_toggle_success(mock_embedding, mock_collection_exists @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_process_push_menu_disabled(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None 
mock_delete_meta_catalog.return_value = None @@ -3869,7 +3879,7 @@ def test_catalog_sync_push_menu_process_push_menu_disabled(mock_embedding, mock_ CatalogProviderMapping.objects(provider="petpooja").delete() BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() - POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() LLMSecret.objects.delete() CollectionData.objects(collection_name=catalog_data_collection).delete() # CollectionData.objects(collection_name=catalog_images_collection).delete() @@ -3883,13 +3893,15 @@ def test_catalog_sync_push_menu_process_push_menu_disabled(mock_embedding, mock_ @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_item_toggle_process_item_toggle_disabled(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, update_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, update_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None update_meta_catalog.return_value = None embedding = list(np.random.random(LLMProcessor.__embedding__)) @@ -3947,7 +3959,7 @@ def test_catalog_sync_item_toggle_process_item_toggle_disabled(mock_embedding, m assert bot_sync_config.branch_name == "branch1" assert bot_sync_config.parent_bot == pytest.bot - pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", 
sync_type="push_menu").first() + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").first() assert pos_integration is not None assert pos_integration.config["restaurant_id"] == "98765" assert pos_integration.meta_config["access_token"] == "dummy_access_token" @@ -3996,7 +4008,7 @@ def test_catalog_sync_item_toggle_process_item_toggle_disabled(mock_embedding, m CatalogProviderMapping.objects(provider="petpooja").delete() BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() - POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").delete() LLMSecret.objects.delete() CollectionData.objects(collection_name=catalog_data_collection).delete() # CollectionData.objects(collection_name=catalog_images_collection).delete() @@ -4010,14 +4022,16 @@ def test_catalog_sync_item_toggle_process_item_toggle_disabled(mock_embedding, m @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_ai_disabled_meta_disabled(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None 
mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -4148,6 +4162,8 @@ def test_catalog_sync_push_menu_ai_disabled_meta_disabled(mock_embedding, mock_c {"id": doc.data["id"], "price": doc.data["price"]} for doc in catalog_data_docs ] + for i in catalog_data_docs: + print(i.to_mongo().to_dict()) expected_items = [ {"id": "10539634", "price": 8700.0}, @@ -4162,28 +4178,181 @@ def test_catalog_sync_push_menu_ai_disabled_meta_disabled(mock_embedding, mock_c CatalogProviderMapping.objects(provider="petpooja").delete() BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() - POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() LLMSecret.objects.delete() - CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_data_collection).delete() CollectionData.objects(collection_name=catalog_images_collection).delete() CatalogSyncLogs.objects.delete() CognitionData.objects(bot=pytest.bot).delete() CognitionSchema.objects(bot=pytest.bot).delete() + @pytest.mark.asyncio @responses.activate @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) +@mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_item_toggle_ai_disabled_meta_disabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_format_and_send_mail, mock_update_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + 
mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None + mock_update_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=item_toggle", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/item_toggle" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", 
sync_type="item_toggle").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_item_toggle = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + item_toggle_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + + with item_toggle_payload_path.open("r", encoding="utf-8") as f: + item_toggle_payload = json.load(f) + + response = client.post( + url=sync_url, + json=item_toggle_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." + assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="item_toggle", token=token, + provider="petpooja", data=item_toggle_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Sync to knowledge vault and Meta is not allowed for this bot. Contact Support!!" 
+ + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "availability": doc.data["availability"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "availability": "in stock"}, + {"id": "10539699", "availability": "in stock"}, + {"id": "10539580", "availability": "out of stock"}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() + CognitionData.objects(bot=pytest.bot).delete() + CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_ai_enabled_meta_disabled(mock_embedding, mock_collection_exists, mock_create_collection, - 
mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -4335,28 +4504,187 @@ def test_catalog_sync_push_menu_ai_enabled_meta_disabled(mock_embedding, mock_co CatalogProviderMapping.objects(provider="petpooja").delete() BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() - POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() LLMSecret.objects.delete() - CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_data_collection).delete() CollectionData.objects(collection_name=catalog_images_collection).delete() CatalogSyncLogs.objects.delete() + # CognitionData.objects(bot=pytest.bot).delete() + # CognitionSchema.objects(bot=pytest.bot).delete() + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) +@mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", autospec=True) +def test_catalog_sync_item_toggle_ai_enabled_meta_disabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_format_and_send_mail, mock_update_meta_catalog): + mock_collection_exists.return_value = False + 
mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None + mock_update_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=item_toggle", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/item_toggle" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = 
POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_item_toggle = True + bot_sync_config.ai_enabled = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + item_toggle_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + + with item_toggle_payload_path.open("r", encoding="utf-8") as f: + item_toggle_payload = json.load(f) + + response = client.post( + url=sync_url, + json=item_toggle_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." + assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="item_toggle", token=token, + provider="petpooja", data=item_toggle_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Sync to Meta is not allowed for this bot. Contact Support!!" 
+ + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "availability": doc.data["availability"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "availability": "in stock"}, + {"id": "10539699", "availability": "in stock"}, + {"id": "10539580", "availability": "out of stock"}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 3 + + cognition_map = {doc.data["id"]: doc.data["availability"] for doc in cognition_data_docs if + "id" in doc.data and "availability" in doc.data} + for item in expected_items: + assert item["id"] in cognition_map + assert cognition_map[item["id"]] == item["availability"] + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() CognitionData.objects(bot=pytest.bot).delete() CognitionSchema.objects(bot=pytest.bot).delete() + @pytest.mark.asyncio @responses.activate @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", 
autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_ai_disabled_meta_enabled(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -4502,28 +4830,180 @@ def test_catalog_sync_push_menu_ai_disabled_meta_enabled(mock_embedding, mock_co CatalogProviderMapping.objects(provider="petpooja").delete() BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() - POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="invalid_sync").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="push_menu").delete() LLMSecret.objects.delete() - CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_data_collection).delete() CollectionData.objects(collection_name=catalog_images_collection).delete() CatalogSyncLogs.objects.delete() + # CognitionData.objects(bot=pytest.bot).delete() + # CognitionSchema.objects(bot=pytest.bot).delete() + + +@pytest.mark.asyncio +@responses.activate +@mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) +@mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) +@mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) +@mock.patch.object(MetaProcessor, "update_meta_catalog", autospec=True) +@mock.patch.object(litellm, "aembedding", 
autospec=True) +def test_catalog_sync_item_toggle_ai_disabled_meta_enabled(mock_embedding, mock_collection_exists, mock_create_collection, + mock_collection_upsert, mock_format_and_send_mail, mock_update_meta_catalog): + mock_collection_exists.return_value = False + mock_create_collection.return_value = None + mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None + mock_update_meta_catalog.return_value = None + + embedding = list(np.random.random(LLMProcessor.__embedding__)) + mock_embedding.return_value = litellm.EmbeddingResponse( + **{'data': [{'embedding': embedding}, {'embedding': embedding}, {'embedding': embedding}]}) + + secrets = [ + { + "llm_type": "openai", + "api_key": "common_openai_key", + "models": ["common_openai_model1", "common_openai_model2"], + "user": "123", + "timestamp": datetime.utcnow() + }, + ] + + for secret in secrets: + LLMSecret(**secret).save() + + payload = { + "provider": "petpooja", + "config": { + "restaurant_name": "restaurant1", + "branch_name": "branch1", + "restaurant_id": "98765" + }, + "meta_config": { + "access_token":"dummy_access_token", + "catalog_id":"12345" + } + } + + response = client.post( + url=f"/api/bot/{pytest.bot}/data/integrations/add?sync_type=item_toggle", + json = payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "POS Integration Complete" + assert actual["error_code"] == 0 + assert actual["success"] + assert "integration/petpooja/item_toggle" in actual["data"] + assert str(pytest.bot) in actual["data"] + sync_url = actual["data"] + token = sync_url.split(str(pytest.bot) + "/")[1] + + provider_mapping = CatalogProviderMapping.objects(provider="petpooja").first() + assert provider_mapping is not None + assert provider_mapping.meta_mappings is not None + assert provider_mapping.kv_mappings is not None + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, 
provider="petpooja").first() + assert bot_sync_config is not None + assert bot_sync_config.restaurant_name == "restaurant1" + assert bot_sync_config.branch_name == "branch1" + assert bot_sync_config.parent_bot == pytest.bot + + pos_integration = POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").first() + assert pos_integration is not None + assert pos_integration.config["restaurant_id"] == "98765" + assert pos_integration.meta_config["access_token"] == "dummy_access_token" + + bot_sync_config = BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").first() + bot_sync_config.process_item_toggle = True + bot_sync_config.meta_enabled = True + bot_sync_config.save() + + event_url = urljoin( + Utility.environment["events"]["server_url"], + f"/api/events/execute/{EventClass.catalog_integration}", + ) + responses.add( + "POST", + event_url, + json={"success": True, "message": "Event triggered successfully!"}, + ) + + item_toggle_payload_path = Path("tests/testing_data/catalog_sync/catalog_sync_item_toggle_payload.json") + + with item_toggle_payload_path.open("r", encoding="utf-8") as f: + item_toggle_payload = json.load(f) + + response = client.post( + url=sync_url, + json=item_toggle_payload, + headers={"Authorization": pytest.token_type + " " + pytest.access_token} + ) + actual = response.json() + assert actual["message"] == "Sync in progress! Check logs." 
+ assert actual["error_code"] == 0 + assert actual["data"] is None + assert actual["success"] + + complete_end_to_end_event_execution( + pytest.bot, "integration@demo.ai", EventClass.catalog_integration, sync_type="item_toggle", token=token, + provider="petpooja", data=item_toggle_payload + ) + + latest_log = CatalogSyncLogs.objects(bot=str(pytest.bot)).order_by("-start_timestamp").first() + print(latest_log.to_mongo().to_dict()) + assert latest_log is not None + assert latest_log.execution_id + assert latest_log.sync_status == "Completed" + assert latest_log.status == "Success" + assert hasattr(latest_log, "exception") + assert latest_log.exception == "Sync to knowledge vault is not allowed for this bot. Contact Support!!" + + restaurant_name, branch_name = CognitionDataProcessor.get_restaurant_and_branch_name(pytest.bot) + catalog_data_collection = f"{restaurant_name}_{branch_name}_catalog_data" + catalog_data_docs = CollectionData.objects(collection_name=catalog_data_collection, bot=pytest.bot) + catalog_item_summaries = [ + {"id": doc.data["id"], "availability": doc.data["availability"]} + for doc in catalog_data_docs + ] + + expected_items = [ + {"id": "10539634", "availability": "in stock"}, + {"id": "10539699", "availability": "in stock"}, + {"id": "10539580", "availability": "out of stock"}, + ] + + assert all(item in catalog_item_summaries for item in expected_items) + + cognition_data_docs = CognitionData.objects(bot=str(pytest.bot)) + assert cognition_data_docs.count() == 0 + + CatalogProviderMapping.objects(provider="petpooja").delete() + BotSyncConfig.objects(branch_bot=pytest.bot, provider="petpooja").delete() + POSIntegrations.objects(bot=pytest.bot, provider="petpooja", sync_type="item_toggle").delete() + LLMSecret.objects.delete() + CollectionData.objects(collection_name=catalog_data_collection).delete() + # CollectionData.objects(collection_name=catalog_images_collection).delete() + CatalogSyncLogs.objects.delete() 
CognitionData.objects(bot=pytest.bot).delete() CognitionSchema.objects(bot=pytest.bot).delete() - @pytest.mark.asyncio @responses.activate @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_global_image_not_found(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -4652,14 +5132,16 @@ def test_catalog_sync_push_menu_global_image_not_found(mock_embedding, mock_coll @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_sync_push_menu_global_local_images_success(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + 
mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -4850,14 +5332,16 @@ def test_catalog_sync_push_menu_global_local_images_success(mock_embedding, mock @mock.patch.object(LLMProcessor, "__collection_exists__", autospec=True) @mock.patch.object(LLMProcessor, "__create_collection__", autospec=True) @mock.patch.object(LLMProcessor, "__collection_upsert__", autospec=True) +@mock.patch.object(MailUtility,"format_and_send_mail", autospec=True) @mock.patch.object(MetaProcessor, "push_meta_catalog", autospec=True) @mock.patch.object(MetaProcessor, "delete_meta_catalog", autospec=True) @mock.patch.object(litellm, "aembedding", autospec=True) def test_catalog_rerun_sync_push_menu_success(mock_embedding, mock_collection_exists, mock_create_collection, - mock_collection_upsert, mock_delete_meta_catalog, mock_push_meta_catalog): + mock_collection_upsert, mock_format_and_send_mail, mock_delete_meta_catalog, mock_push_meta_catalog): mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_format_and_send_mail.return_value = None mock_push_meta_catalog.return_value = None mock_delete_meta_catalog.return_value = None @@ -27927,7 +28411,6 @@ def test_get_bot_settings(): assert actual["message"] is None actual["data"].pop("bot") actual["data"].pop("user") - actual["data"].pop("timestamp") actual["data"].pop("status") assert actual['data'] == {'is_billed': False, 'chat_token_expiry': 30, 'data_generation_limit_per_day': 3, @@ -27949,7 +28432,8 @@ def test_get_bot_settings(): 'cognition_columns_per_collection_limit': 5, 'content_importer_limit_per_day': 5, 'integrations_per_user_limit': 3, - 
'retry_broadcasting_limit': 3} + 'retry_broadcasting_limit': 3, + 'catalog_sync_limit_per_day': 5} @patch("kairon.shared.utils.Utility.request_event_server", autospec=True) @@ -28056,7 +28540,8 @@ def test_update_analytics_settings(): 'content_importer_limit_per_day': 5, 'cognition_columns_per_collection_limit': 5, 'integrations_per_user_limit': 3, - 'retry_broadcasting_limit': 3} + 'retry_broadcasting_limit': 3, + 'catalog_sync_limit_per_day': 5} def test_delete_channels_config(): diff --git a/tests/unit_test/action/action_test.py b/tests/unit_test/action/action_test.py index 5b7847693..652489f54 100644 --- a/tests/unit_test/action/action_test.py +++ b/tests/unit_test/action/action_test.py @@ -2697,7 +2697,8 @@ def test_get_prompt_action_config(self): 'content_importer_limit_per_day': 5, 'integrations_per_user_limit': 3, 'live_agent_enabled': False, - 'retry_broadcasting_limit': 3} + 'retry_broadcasting_limit': 3, + 'catalog_sync_limit_per_day': 5} def test_prompt_action_not_exists(self): with pytest.raises(ActionFailure, match="Faq feature is disabled for the bot! 
Please contact support."): @@ -3939,7 +3940,8 @@ def test_get_bot_settings(self): 'cognition_columns_per_collection_limit': 5, 'integrations_per_user_limit': 3, 'live_agent_enabled': False, - 'retry_broadcasting_limit': 3} + 'retry_broadcasting_limit': 3, + 'catalog_sync_limit_per_day': 5} def test_get_prompt_action_config_2(self): bot = "test_bot_action_test" diff --git a/tests/unit_test/data_processor/data_processor_test.py b/tests/unit_test/data_processor/data_processor_test.py index f91b9d0b7..59fbbbcd5 100644 --- a/tests/unit_test/data_processor/data_processor_test.py +++ b/tests/unit_test/data_processor/data_processor_test.py @@ -1797,7 +1797,7 @@ def test_validate_data_push_menu_success(self): user = 'test_user' collection_name = 'groceries' primary_key_col = "id" - event_type = 'push_menu' + sync_type = 'push_menu' metadata = [ { @@ -1846,7 +1846,7 @@ def test_validate_data_push_menu_success(self): validation_summary = processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -1854,12 +1854,12 @@ def test_validate_data_push_menu_success(self): assert validation_summary == {} CognitionSchema.objects(bot=bot, collection_name="groceries").delete() - def test_validate_data_field_update_success(self): + def test_validate_data_item_toggle_success(self): bot = 'test_bot' user = 'test_user' collection_name = 'groceries' primary_key_col = "id" - event_type = "field_update" + sync_type = "item_toggle" metadata = [ { @@ -1940,7 +1940,7 @@ def test_validate_data_field_update_success(self): validation_summary = processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -1949,20 +1949,20 @@ def test_validate_data_field_update_success(self): CognitionSchema.objects(bot=bot, collection_name="groceries").delete() CognitionData.objects(bot=bot, 
collection="groceries").delete() - def test_validate_data_event_type_does_not_exist(self): + def test_validate_data_sync_type_does_not_exist(self): bot = 'test_bot' collection_name = 'groceries' primary_key_col = "id" data = [{"id": 1, "item": "Juice", "price": 2.50, "quantity": 10}] - event_type = 'non_existent_event_type' + sync_type = 'non_existent_sync_type' processor = CognitionDataProcessor() - with pytest.raises(AppException, match=f"Event type does not exist"): + with pytest.raises(AppException, match=f"Sync type does not exist"): processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -1972,7 +1972,7 @@ def test_validate_data_missing_collection(self): collection_name = 'nonexistent_collection' primary_key_col = "id" data = [{"id": 1, "item": "Juice", "price": 2.50, "quantity": 10}] - event_type = 'push_menu' + sync_type = 'push_menu' processor = CognitionDataProcessor() @@ -1980,7 +1980,7 @@ def test_validate_data_missing_collection(self): processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -1990,7 +1990,7 @@ def test_validate_data_missing_primary_key(self): user = 'test_user' collection_name = 'groceries' primary_key_col = "id" - event_type = 'push_menu' + sync_type = 'push_menu' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2020,7 +2020,7 @@ def test_validate_data_missing_primary_key(self): primary_key_col=primary_key_col, collection_name=collection_name, data=data, - event_type=event_type, + sync_type=sync_type, bot=bot ) CognitionSchema.objects(bot=bot, collection_name="groceries").delete() @@ -2030,7 +2030,7 @@ def test_validate_data_column_length_mismatch(self): user = 'test_user' collection_name = 'groceries' primary_key_col = "id" - event_type = 'push_menu' + sync_type = 
'push_menu' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2057,7 +2057,7 @@ def test_validate_data_column_length_mismatch(self): validation_summary = processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -2072,7 +2072,7 @@ def test_validate_data_invalid_columns(self): user = 'test_user' collection_name = 'groceries' primary_key_col = "id" - event_type = 'push_menu' + sync_type = 'push_menu' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2099,7 +2099,7 @@ def test_validate_data_invalid_columns(self): validation_summary = processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -2114,7 +2114,7 @@ def test_validate_data_document_non_existence(self): user = 'test_user' collection_name = 'groceries' primary_key_col = "id" - event_type = 'field_update' + sync_type = 'item_toggle' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2157,7 +2157,7 @@ def test_validate_data_document_non_existence(self): validation_summary = processor.validate_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=data, bot=bot ) @@ -2179,7 +2179,7 @@ async def test_upsert_data_push_menu_success(self, mock_embedding, mock_collecti user = 'test_user' collection_name = 'groceries' primary_key_col = 'id' - event_type = 'push_menu' + sync_type = 'push_menu' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2241,7 +2241,7 @@ async def test_upsert_data_push_menu_success(self, mock_embedding, mock_collecti result = await processor.upsert_data( primary_key_col=primary_key_col, 
collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=upsert_data, bot=bot, user=user @@ -2273,14 +2273,14 @@ async def test_upsert_data_push_menu_success(self, mock_embedding, mock_collecti @patch.object(LLMProcessor, "__create_collection__", autospec=True) @patch.object(LLMProcessor, "__collection_upsert__", autospec=True) @patch.object(litellm, "aembedding", autospec=True) - async def test_upsert_data_field_update_success(self, mock_embedding, mock_collection_upsert, + async def test_upsert_data_item_toggle_success(self, mock_embedding, mock_collection_upsert, mock_create_collection, mock_collection_exists): bot = 'test_bot' user = 'test_user' collection_name = 'groceries' primary_key_col = 'id' - event_type = 'field_update' + sync_type = 'item_toggle' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2358,7 +2358,7 @@ async def test_upsert_data_field_update_success(self, mock_embedding, mock_colle result = await processor.upsert_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=upsert_data, bot=bot, user=user @@ -2396,7 +2396,7 @@ async def test_upsert_data_empty_data_list(self, mock_embedding, mock_collection user = 'test_user' collection_name = 'groceries' primary_key_col = 'id' - event_type = 'push_menu' + sync_type = 'push_menu' metadata = [ {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, @@ -2454,7 +2454,7 @@ async def test_upsert_data_empty_data_list(self, mock_embedding, mock_collection result = await processor.upsert_data( primary_key_col=primary_key_col, collection_name=collection_name, - event_type=event_type, + sync_type=sync_type, data=upsert_data, bot=bot, user=user @@ -2679,16 +2679,16 @@ def test_get_pydantic_type_invalid(self): with pytest.raises(ValueError, match="Unsupported data type: unknown"): 
CognitionDataProcessor.get_pydantic_type('unknown') - def test_validate_event_type_valid(self): + def test_validate_sync_type_valid(self): processor = CognitionDataProcessor() - valid_event_type = list(VaultSyncType.__members__.keys())[0] - processor._validate_sync_type(valid_event_type) + valid_sync_type = list(VaultSyncType.__members__.keys())[0] + processor._validate_sync_type(valid_sync_type) - def test_validate_event_type_invalid(self): + def test_validate_sync_type_invalid(self): processor = CognitionDataProcessor() - invalid_event_type = "invalid_event" - with pytest.raises(AppException, match="Event type does not exist"): - processor._validate_sync_type(invalid_event_type) + invalid_sync_type = "invalid_event" + with pytest.raises(AppException, match="Sync type does not exist"): + processor._validate_sync_type(invalid_sync_type) def test_validate_collection_exists_valid(self): bot = 'test_bot' From 780a404a5ee7f91741137dcd8916b2f607292bac Mon Sep 17 00:00:00 2001 From: himanshu_gupta Date: Mon, 28 Apr 2025 21:45:58 +0530 Subject: [PATCH 4/5] Updated Test Cases --- tests/integration_test/services_test.py | 4 + .../data_processor/data_processor_test.py | 215 ++---------------- 2 files changed, 22 insertions(+), 197 deletions(-) diff --git a/tests/integration_test/services_test.py b/tests/integration_test/services_test.py index b968843dd..d39d1d3b3 100644 --- a/tests/integration_test/services_test.py +++ b/tests/integration_test/services_test.py @@ -8709,6 +8709,9 @@ def _mock_get_bot_settings(*args, **kwargs): def test_metadata_upload_api_column_limit_exceeded(): + bot_settings = BotSettings.objects(bot=pytest.bot).get() + bot_settings.cognition_columns_per_collection_limit = 5 + bot_settings.save() response = client.post( url=f"/api/bot/{pytest.bot}/data/cognition/schema", json={ @@ -28411,6 +28414,7 @@ def test_get_bot_settings(): assert actual["message"] is None actual["data"].pop("bot") actual["data"].pop("user") + actual["data"].pop("timestamp") 
actual["data"].pop("status") assert actual['data'] == {'is_billed': False, 'chat_token_expiry': 30, 'data_generation_limit_per_day': 3, diff --git a/tests/unit_test/data_processor/data_processor_test.py b/tests/unit_test/data_processor/data_processor_test.py index 59fbbbcd5..1d80ed954 100644 --- a/tests/unit_test/data_processor/data_processor_test.py +++ b/tests/unit_test/data_processor/data_processor_test.py @@ -2264,6 +2264,14 @@ async def test_upsert_data_push_menu_success(self, mock_embedding, mock_collecti assert updated_record.data["price"] == 3.00 # Updated price assert updated_record.data["quantity"] == 5 + mock_embedding.assert_called_once_with( + model="text-embedding-3-large", + input=['{"id":1,"item":"Juice","price":2.5,"quantity":10}', '{"id":2,"item":"Milk","price":3.0,"quantity":5}'], + metadata={'user': user, 'bot': bot, 'invocation': 'knowledge_vault_sync'}, + api_key="openai_key", + num_retries=3 + ) + CognitionSchema.objects(bot=bot, collection_name="groceries").delete() CognitionData.objects(bot=bot, collection="groceries").delete() LLMSecret.objects.delete() @@ -2381,6 +2389,15 @@ async def test_upsert_data_item_toggle_success(self, mock_embedding, mock_collec assert updated_record.data["price"] == 27.00 # Updated price assert updated_record.data["quantity"] == 12 + mock_embedding.assert_called_once_with( + model="text-embedding-3-large", + input=['{"id":1,"item":"Juice","price":80.5,"quantity":56}', + '{"id":2,"item":"Milk","price":27.0,"quantity":12}'], + metadata={'user': user, 'bot': bot, 'invocation': 'knowledge_vault_sync'}, + api_key="openai_key", + num_retries=3 + ) + CognitionSchema.objects(bot=bot, collection_name="groceries").delete() CognitionData.objects(bot=bot, collection="groceries").delete() LLMSecret.objects.delete() @@ -2463,208 +2480,12 @@ async def test_upsert_data_empty_data_list(self, mock_embedding, mock_collection data = list(CognitionData.objects(bot=bot, collection=collection_name)) assert result["message"] == "Upsert 
complete!" - assert len(data) == 1 - - existing_record = data[0] - assert existing_record.data["id"] == 2 - assert existing_record.data["item"] == "Milk" - assert existing_record.data["price"] == 2.80 - assert existing_record.data["quantity"] == 5 + assert len(data) == 0 CognitionSchema.objects(bot=bot, collection_name=collection_name).delete() CognitionData.objects(bot=bot, collection=collection_name).delete() LLMSecret.objects.delete() - @pytest.mark.asyncio - @patch.object(litellm, "aembedding", autospec=True) - @patch.object(LLMProcessor, "__collection_upsert__", autospec=True) - async def test_sync_with_qdrant_success(self, mock_collection_upsert, mock_embedding): - bot = "test_bot" - user = "test_user" - collection_name = "groceries" - primary_key_col = "id" - - metadata = [ - {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, - {"column_name": "item", "data_type": "str", "enable_search": True, "create_embeddings": True}, - {"column_name": "price", "data_type": "float", "enable_search": True, "create_embeddings": True}, - {"column_name": "quantity", "data_type": "int", "enable_search": True, "create_embeddings": True}, - ] - - cognition_schema = CognitionSchema( - metadata=[ColumnMetadata(**item) for item in metadata], - collection_name=collection_name, - user=user, - bot=bot, - timestamp=datetime.utcnow() - ) - cognition_schema.validate(clean=True) - cognition_schema.save() - - document_data = { - "id": 2, - "item": "Milk", - "price": 2.80, - "quantity": 5 - } - document = CognitionData( - data=document_data, - content_type="json", - collection=collection_name, - user=user, - bot=bot, - timestamp=datetime.utcnow() - ) - document.save() - - saved_document = None - for doc in CognitionData.objects(bot=bot, collection=collection_name): - doc_dict = doc.to_mongo().to_dict() - if doc_dict.get("data", {}).get("id") == 2: # Match based on `data.id` - saved_document = doc_dict - break - assert saved_document, "Saved 
CognitionData document not found" - vector_id = saved_document["vector_id"] - - if not isinstance(document, dict): - document = document.to_mongo().to_dict() - - embedding = list(np.random.random(1532)) - mock_embedding.return_value = {'data': [{'embedding': embedding}, {'embedding': embedding}]} - - mock_collection_upsert.return_value = None - - llm_secret = LLMSecret( - llm_type="openai", - api_key="openai_key", - models=["model1", "model2"], - api_base_url="https://api.example.com", - bot=bot, - user=user - ) - llm_secret.save() - - processor = CognitionDataProcessor() - llm_processor = LLMProcessor(bot, DEFAULT_LLM) - await processor.sync_with_qdrant( - llm_processor=llm_processor, - collection_name=collection_name, - bot=bot, - document=document, - user=user, - primary_key_col=primary_key_col - ) - - mock_embedding.assert_called_once_with( - model="text-embedding-3-large", - input=['{"id":2,"item":"Milk","price":2.8,"quantity":5}'], - metadata={'user': user, 'bot': bot, 'invocation': 'knowledge_vault_sync'}, - api_key="openai_key", - num_retries=3 - ) - mock_collection_upsert.assert_called_once_with( - llm_processor, - collection_name, - { - "points": [ - { - "id": vector_id, - "vector": embedding, - "payload": {'id': 2, 'item': 'Milk', 'price': 2.8, 'quantity': 5} - } - ] - }, - err_msg="Unable to train FAQ! 
Contact support" - ) - - CognitionSchema.objects(bot=bot, collection_name="groceries").delete() - CognitionData.objects(bot=bot, collection="groceries").delete() - LLMSecret.objects.delete() - - @pytest.mark.asyncio - @patch.object(litellm, "aembedding", autospec=True) - @patch.object(AioRestClient, "request", autospec=True) - async def test_sync_with_qdrant_upsert_failure(self, mock_request, mock_embedding): - bot = "test_bot" - user = "test_user" - collection_name = "groceries" - primary_key_col = "id" - - metadata = [ - {"column_name": "id", "data_type": "int", "enable_search": True, "create_embeddings": True}, - {"column_name": "item", "data_type": "str", "enable_search": True, "create_embeddings": True}, - {"column_name": "price", "data_type": "float", "enable_search": True, "create_embeddings": True}, - {"column_name": "quantity", "data_type": "int", "enable_search": True, "create_embeddings": True}, - ] - - cognition_schema = CognitionSchema( - metadata=[ColumnMetadata(**item) for item in metadata], - collection_name=collection_name, - user=user, - bot=bot, - timestamp=datetime.utcnow() - ) - cognition_schema.validate(clean=True) - cognition_schema.save() - - document_data = { - "id": 2, - "item": "Milk", - "price": 2.80, - "quantity": 5 - } - document = CognitionData( - data=document_data, - content_type="json", - collection=collection_name, - user=user, - bot=bot, - timestamp=datetime.utcnow() - ) - document.save() - if not isinstance(document, dict): - document = document.to_mongo().to_dict() - - embedding = list(np.random.random(1532)) - mock_embedding.return_value = {'data': [{'embedding': embedding}, {'embedding': embedding}]} - - mock_request.side_effect = ConnectionError("Failed to connect to Qdrant") - - llm_secret = LLMSecret( - llm_type="openai", - api_key="openai_key", - models=["model1", "model2"], - api_base_url="https://api.example.com", - bot=bot, - user=user - ) - llm_secret.save() - - processor = CognitionDataProcessor() - llm_processor = 
LLMProcessor(bot, DEFAULT_LLM) - - with pytest.raises(AppException, match="Failed to sync document with Qdrant: Failed to connect to Qdrant"): - await processor.sync_with_qdrant( - llm_processor=llm_processor, - collection_name=collection_name, - bot=bot, - document=document, - user=user, - primary_key_col=primary_key_col - ) - - mock_embedding.assert_called_once_with( - model="text-embedding-3-large", - input=['{"id":2,"item":"Milk","price":2.8,"quantity":5}'], - metadata={'user': user, 'bot': bot, 'invocation': 'knowledge_vault_sync'}, - api_key="openai_key", - num_retries=3 - ) - - CognitionSchema.objects(bot=bot, collection_name="groceries").delete() - CognitionData.objects(bot=bot, collection="groceries").delete() - LLMSecret.objects.delete() - def test_get_pydantic_type_int(self): result = CognitionDataProcessor().get_pydantic_type('int') expected = (int, ...) From a72c552d1b71e5d1446d2322c7792851d7d3ab3f Mon Sep 17 00:00:00 2001 From: himanshu_gupta Date: Mon, 28 Apr 2025 23:46:36 +0530 Subject: [PATCH 5/5] Updated Test Cases --- .../data_processor/catalog_sync_log_processor_test.py | 3 ++- tests/unit_test/data_processor/data_processor_test.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/unit_test/data_processor/catalog_sync_log_processor_test.py b/tests/unit_test/data_processor/catalog_sync_log_processor_test.py index 2c85821ef..1c4440449 100644 --- a/tests/unit_test/data_processor/catalog_sync_log_processor_test.py +++ b/tests/unit_test/data_processor/catalog_sync_log_processor_test.py @@ -568,4 +568,5 @@ def test_validate_image_configurations_when_catalog_images_missing_global_fallba with pytest.raises(Exception, match="Global fallback image URL not found"): CatalogSyncLogProcessor.validate_image_configurations(bot, user) - BotSyncConfig.objects.delete() \ No newline at end of file + BotSyncConfig.objects.delete() + CollectionData.objects.delete() \ No newline at end of file diff --git 
a/tests/unit_test/data_processor/data_processor_test.py b/tests/unit_test/data_processor/data_processor_test.py index 1d80ed954..027abfa9c 100644 --- a/tests/unit_test/data_processor/data_processor_test.py +++ b/tests/unit_test/data_processor/data_processor_test.py @@ -2406,8 +2406,9 @@ async def test_upsert_data_item_toggle_success(self, mock_embedding, mock_collec @patch.object(LLMProcessor, "__collection_exists__", autospec=True) @patch.object(LLMProcessor, "__create_collection__", autospec=True) @patch.object(LLMProcessor, "__collection_upsert__", autospec=True) + @patch.object(LLMProcessor, "__delete_collection_points__",autospec=True) @patch.object(litellm, "aembedding", autospec=True) - async def test_upsert_data_empty_data_list(self, mock_embedding, mock_collection_upsert, mock_create_collection, + async def test_upsert_data_empty_data_list(self, mock_embedding, mock_delete_collection_points, mock_collection_upsert, mock_create_collection, mock_collection_exists): bot = 'test_bot' user = 'test_user' @@ -2463,6 +2464,7 @@ async def test_upsert_data_empty_data_list(self, mock_embedding, mock_collection mock_collection_exists.return_value = False mock_create_collection.return_value = None mock_collection_upsert.return_value = None + mock_delete_collection_points.return_value = None embedding = list(np.random.random(1532)) mock_embedding.return_value = {'data': [{'embedding': embedding}, {'embedding': embedding}]}