From 7e3ddea4948f9e7bf5f99f4878909fc40c78e30f Mon Sep 17 00:00:00 2001 From: antejavor Date: Tue, 15 Jul 2025 09:56:25 +0200 Subject: [PATCH 1/2] Update. --- .gitignore | 6 +- integrations/agents/README.md | 231 +++++++++++ integrations/agents/cypher_generator.py | 254 ++++++++++++ integrations/agents/database_analyzer.py | 159 ++++++++ integrations/agents/main.py | 448 ++++++++++++++++++++++ integrations/agents/mysql_troubleshoot.py | 294 ++++++++++++++ integrations/agents/pyproject.toml | 31 ++ integrations/mcp-memgraph/pyproject.toml | 2 +- pyproject.toml | 1 + uv.lock | 335 ++++++++++++++-- 10 files changed, 1731 insertions(+), 30 deletions(-) create mode 100644 integrations/agents/README.md create mode 100644 integrations/agents/cypher_generator.py create mode 100644 integrations/agents/database_analyzer.py create mode 100644 integrations/agents/main.py create mode 100644 integrations/agents/mysql_troubleshoot.py create mode 100644 integrations/agents/pyproject.toml diff --git a/.gitignore b/.gitignore index 1800114..68b0296 100644 --- a/.gitignore +++ b/.gitignore @@ -171,4 +171,8 @@ cython_debug/ .ruff_cache/ # PyPI configuration file -.pypirc \ No newline at end of file +.pypirc + + +# VS Code +.vscode/ \ No newline at end of file diff --git a/integrations/agents/README.md b/integrations/agents/README.md new file mode 100644 index 0000000..c106785 --- /dev/null +++ b/integrations/agents/README.md @@ -0,0 +1,231 @@ +# MySQL to Memgraph Migration Agent + +This agent analyzes MySQL databases, generates appropriate Cypher queries, and migrates data to Memgraph using LangGraph workflow. It's specifically designed to work with the Sakila sample database but can be adapted for other MySQL databases. 
+ +## Features + +- **Automatic Schema Analysis**: Connects to MySQL and analyzes table structures, relationships, and constraints +- **Intelligent Migration Planning**: Uses OpenAI GPT to generate optimal migration strategies +- **Cypher Query Generation**: Automatically generates Cypher queries for creating nodes, relationships, and constraints +- **Data Type Mapping**: Maps MySQL data types to appropriate Memgraph/Cypher types +- **Relationship Detection**: Identifies foreign key relationships and converts them to graph relationships +- **Progress Tracking**: Provides detailed progress updates and error handling +- **Verification**: Validates migration results by comparing counts and structures + +## Prerequisites + +1. **Python 3.10+** +2. **MySQL database** with Sakila dataset (or your own database) +3. **Memgraph** instance running and accessible +4. **OpenAI API key** for natural language processing tasks +5. **UV package manager** (already configured in the project) + +## Installation + +1. Navigate to the agents directory: + + ```bash + cd integrations/agents + ``` + +2. Install dependencies using UV: + + ```bash + uv sync + ``` + +3. Copy the environment configuration file: + + ```bash + cp .env.example .env + ``` + +4. Edit the `.env` file with your actual configuration: + + ```bash + # OpenAI API Configuration + OPENAI_API_KEY=your_actual_openai_api_key + + # MySQL Database Configuration + MYSQL_HOST=localhost + MYSQL_USER=root + MYSQL_PASSWORD=your_mysql_password + MYSQL_DATABASE=sakila + MYSQL_PORT=3306 + + # Memgraph Database Configuration + MEMGRAPH_URL=bolt://localhost:7687 + MEMGRAPH_USER= + MEMGRAPH_PASSWORD= + MEMGRAPH_DATABASE=memgraph + ``` + +## Setting Up Sakila Database + +If you don't have the Sakila database set up: + +1. Download the Sakila database from MySQL's official site +2. 
Import it into your MySQL instance: + ```sql + SOURCE sakila-schema.sql; + SOURCE sakila-data.sql; + ``` + +## Usage + +### Basic Usage + +Run the migration agent: + +```bash +uv run python main.py +``` + +### Programmatic Usage + +```python +from main import MySQLToMemgraphAgent + +# Configure your databases +mysql_config = { + "host": "localhost", + "user": "root", + "password": "", + "database": "sakila", + "port": 3306 +} + +memgraph_config = { + "url": "bolt://localhost:7687", + "username": "", + "password": "", + "database": "memgraph" +} + +# Create and run the agent +agent = MySQLToMemgraphAgent() +result = agent.migrate(mysql_config, memgraph_config) + +print(f"Success: {result['success']}") +print(f"Migrated {len(result['completed_tables'])} tables") +``` + +## How It Works + +The agent follows a multi-step workflow: + +1. **Schema Analysis**: + + - Connects to MySQL database + - Extracts table schemas, foreign keys, and relationships + - Counts rows in each table + +2. **Migration Planning**: + + - Uses OpenAI GPT to analyze the database structure + - Generates an optimal migration plan considering dependencies + - Identifies potential issues and optimizations + +3. **Query Generation**: + + - Maps MySQL data types to Cypher types + - Generates node creation queries for each table + - Creates relationship queries based on foreign keys + - Generates constraint and index creation queries + +4. **Query Validation**: + + - Tests connection to Memgraph + - Validates query syntax + +5. **Migration Execution**: + + - Creates constraints and indexes first + - Migrates data table by table + - Creates relationships between nodes + - Handles errors gracefully + +6. 
**Verification**: + - Compares node and relationship counts + - Provides detailed migration summary + +## Graph Model for Sakila + +The Sakila database is converted to a graph model with the following approach: + +- **Tables → Node Labels**: Each table becomes a node type (e.g., `film` → `Film`) +- **Foreign Keys → Relationships**: FK relationships become directed edges +- **Primary Keys → Node IDs**: Primary keys become unique node identifiers +- **Data Types**: MySQL types are mapped to Cypher-compatible types + +Example transformations: + +- `film` table → `Film` nodes +- `actor` table → `Actor` nodes +- `film_actor` junction table → `ACTED_IN` relationships +- `customer` → `Customer` nodes with `PLACED` relationships to `Rental` nodes + +## Customization + +### Adding Custom Type Mappings + +Edit `cypher_generator.py` to add custom MySQL to Cypher type mappings: + +```python +self.type_mapping['your_mysql_type'] = 'CYPHER_TYPE' +``` + +### Custom Relationship Names + +Modify the `_generate_relationship_type` method in `cypher_generator.py` to customize relationship naming logic. + +### Advanced Query Generation + +Override methods in `CypherGenerator` class to customize how queries are generated for your specific use case. + +## Troubleshooting + +### Common Issues + +1. **Connection Errors**: + + - Verify MySQL and Memgraph are running + - Check connection credentials in `.env` + - Ensure ports are accessible + +2. **Import Errors**: + + - Make sure all dependencies are installed: `uv sync` + - Check Python path configurations + +3. **Migration Failures**: + + - Check logs for specific error messages + - Verify data integrity in source database + - Ensure target database has sufficient permissions + +4. **Memory Issues**: + - For large databases, consider implementing batch processing + - Monitor memory usage during migration + +### Logging + +The agent provides detailed logging. 
"""
Cypher query generator for converting MySQL schema to Memgraph.
"""

from typing import Dict, List, Any
import logging

logger = logging.getLogger(__name__)


class CypherGenerator:
    """Generates Cypher queries for Memgraph based on MySQL schema.

    Tables become node labels (PascalCase), foreign keys become directed
    relationships, and MySQL column types are mapped to Cypher type names.
    """

    def __init__(self):
        """Initialize the MySQL -> Cypher type lookup table."""
        # Keys are MySQL base type names (size suffixes stripped by
        # mysql_to_cypher_type); values are Cypher-level type names.
        self.type_mapping = {
            "int": "INTEGER",
            "bigint": "INTEGER",
            "smallint": "INTEGER",
            "tinyint": "INTEGER",
            "varchar": "STRING",
            "char": "STRING",
            "text": "STRING",
            "longtext": "STRING",
            "mediumtext": "STRING",
            "decimal": "FLOAT",
            "float": "FLOAT",
            "double": "FLOAT",
            "datetime": "DATETIME",
            "timestamp": "DATETIME",
            "date": "DATE",
            "time": "TIME",
            "enum": "STRING",
            "set": "STRING",
            "blob": "STRING",
            "json": "STRING",
        }

    def mysql_to_cypher_type(self, mysql_type: str) -> str:
        """Convert a MySQL column type (e.g. ``varchar(255)``) to a Cypher type.

        Size specifications are stripped; unknown types fall back to STRING.
        """
        base_type = mysql_type.split("(")[0].lower()
        return self.type_mapping.get(base_type, "STRING")

    def generate_node_creation_query(
        self, table_name: str, schema: List[Dict[str, Any]]
    ) -> str:
        """Generate a parameterized UNWIND query creating one node per row.

        The query expects a ``$data`` parameter holding a list of row dicts
        keyed by column name. The primary-key column (or, failing that, the
        first column) is emitted first as the node identifier.
        """
        primary_keys = [col["field"] for col in schema if col["key"] == "PRI"]

        if not primary_keys:
            # No primary key: fall back to the first column as the identifier.
            id_field = schema[0]["field"] if schema else "id"
        else:
            id_field = primary_keys[0]

        # All non-ID columns become plain properties copied from the row.
        # (Collect the field names directly instead of building
        # "name: TYPE" strings and re-splitting them on ':'.)
        property_fields = [
            col["field"] for col in schema if col["field"] != id_field
        ]

        label = self._table_name_to_label(table_name)
        query = f"""
        // Create {label} nodes
        UNWIND $data AS row
        CREATE (n:{label} {{
            {id_field}: row.{id_field}"""

        if property_fields:
            query += ",\n            " + ",\n            ".join(
                f"{field}: row.{field}" for field in property_fields
            )

        query += "\n        })"

        return query.strip()

    def generate_relationship_query(
        self, from_table: str, from_column: str, to_table: str, to_column: str
    ) -> str:
        """Generate a query that links existing nodes along a foreign key."""
        from_label = self._table_name_to_label(from_table)
        to_label = self._table_name_to_label(to_table)
        rel_type = self._generate_relationship_type(from_table, to_table)

        query = f"""
        // Create {rel_type} relationships from {from_label} to {to_label}
        MATCH (from:{from_label})
        MATCH (to:{to_label})
        WHERE from.{from_column} = to.{to_column}
        CREATE (from)-[:{rel_type}]->(to)
        """

        return query.strip()

    def generate_index_queries(
        self, table_name: str, schema: List[Dict[str, Any]]
    ) -> List[str]:
        """Generate label-property index creation queries.

        Memgraph has no ``CREATE UNIQUE INDEX`` syntax — uniqueness is
        expressed via constraints (see generate_constraint_queries) — so
        every indexed column (PRI/UNI/MUL) gets a plain ``CREATE INDEX``.
        """
        queries = []
        label = self._table_name_to_label(table_name)

        for col in schema:
            if col["key"] in ["PRI", "UNI", "MUL"]:
                queries.append(f"CREATE INDEX ON :{label}({col['field']})")

        return queries

    def generate_constraint_queries(
        self, table_name: str, schema: List[Dict[str, Any]]
    ) -> List[str]:
        """Generate uniqueness-constraint queries for PRI and UNI columns."""
        queries = []
        label = self._table_name_to_label(table_name)

        # Primary key constraints
        primary_keys = [col["field"] for col in schema if col["key"] == "PRI"]
        for pk in primary_keys:
            query = f"CREATE CONSTRAINT ON (n:{label}) ASSERT n.{pk} IS UNIQUE"
            queries.append(query)

        # Unique constraints
        unique_keys = [col["field"] for col in schema if col["key"] == "UNI"]
        for uk in unique_keys:
            query = f"CREATE CONSTRAINT ON (n:{label}) ASSERT n.{uk} IS UNIQUE"
            queries.append(query)

        return queries

    def generate_full_migration_script(self, structure: Dict[str, Any]) -> List[str]:
        """Generate the complete migration script as a list of query strings.

        Order matters: constraints, then indexes, then nodes, then
        relationships (relationships MATCH nodes created in step 3).
        """
        queries = []

        # Add header comment
        queries.append("// MySQL to Memgraph Migration Script")
        queries.append("// Generated automatically")
        queries.append("")

        # 1. Create constraints first
        queries.append("// Step 1: Create constraints")
        for table_name, table_info in structure["tables"].items():
            constraint_queries = self.generate_constraint_queries(
                table_name, table_info["schema"]
            )
            queries.extend(constraint_queries)
        queries.append("")

        # 2. Create indexes
        queries.append("// Step 2: Create indexes")
        for table_name, table_info in structure["tables"].items():
            index_queries = self.generate_index_queries(
                table_name, table_info["schema"]
            )
            queries.extend(index_queries)
        queries.append("")

        # 3. Create nodes
        queries.append("// Step 3: Create nodes")
        for table_name, table_info in structure["tables"].items():
            node_query = self.generate_node_creation_query(
                table_name, table_info["schema"]
            )
            queries.append(node_query)
            queries.append("")

        # 4. Create relationships
        queries.append("// Step 4: Create relationships")
        for rel in structure["relationships"]:
            rel_query = self.generate_relationship_query(
                rel["from_table"], rel["from_column"], rel["to_table"], rel["to_column"]
            )
            queries.append(rel_query)
            queries.append("")

        return queries

    def _table_name_to_label(self, table_name: str) -> str:
        """Convert a snake_case table name to a PascalCase Cypher label."""
        return "".join(word.capitalize() for word in table_name.split("_"))

    def _generate_relationship_type(self, from_table: str, to_table: str) -> str:
        """Generate a relationship type name for a table pair.

        Hard-coded patterns cover common Sakila-style schemas; anything else
        falls back to RELATED_TO_<TOLABEL>.
        """
        to_label = self._table_name_to_label(to_table)

        # Common relationship patterns (checked in order; first match wins)
        if "customer" in from_table.lower() and "order" in to_table.lower():
            return "PLACED"
        elif "order" in from_table.lower() and "item" in to_table.lower():
            return "CONTAINS"
        elif "film" in from_table.lower() and "actor" in to_table.lower():
            return "FEATURES"
        elif "actor" in from_table.lower() and "film" in to_table.lower():
            return "ACTED_IN"
        elif "store" in from_table.lower():
            return "BELONGS_TO"
        elif "address" in to_table.lower():
            return "LOCATED_AT"
        elif "category" in to_table.lower():
            return "BELONGS_TO_CATEGORY"
        elif "language" in to_table.lower():
            return "IN_LANGUAGE"
        else:
            return f"RELATED_TO_{to_label.upper()}"

    def prepare_data_for_cypher(
        self, data: List[Dict[str, Any]], schema: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Prepare rows for Cypher ingestion.

        Non-nullable columns that arrive as None are backfilled with a
        type-appropriate default (0 / 0.0 / ""); temporal values are
        stringified because datetime objects are not directly usable as
        query parameters here.
        """
        prepared_data = []

        for row in data:
            prepared_row = {}
            for col in schema:
                field_name = col["field"]
                value = row.get(field_name)
                cypher_type = self.mysql_to_cypher_type(col["type"])

                if value is None:
                    # Backfill NOT NULL columns; nullable columns stay None.
                    if col["null"] == "NO":
                        if cypher_type == "INTEGER":
                            value = 0
                        elif cypher_type == "FLOAT":
                            value = 0.0
                        elif cypher_type == "STRING":
                            value = ""
                elif cypher_type in ["DATETIME", "DATE", "TIME"]:
                    # Convert datetime/date/time objects to strings
                    value = str(value)

                prepared_row[field_name] = value

            prepared_data.append(prepared_row)

        return prepared_data
"""
Database analyzer module for extracting schema and data from MySQL databases.
"""

from typing import Dict, List, Any, Optional
import logging

logger = logging.getLogger(__name__)


class MySQLAnalyzer:
    """Analyzes MySQL database structure and extracts data.

    The MySQL driver is imported lazily in connect() so that this module can
    be imported (and the class unit-tested) without mysql-connector-python
    installed; connection failures are reported via connect()'s return value.
    """

    def __init__(
        self, host: str, user: str, password: str, database: str, port: int = 3306
    ):
        """Store connection parameters; no connection is opened yet."""
        self.connection_config = {
            "host": host,
            "user": user,
            "password": password,
            "database": database,
            "port": port,
        }
        # Live connection handle; None until connect() succeeds.
        self.connection = None

    def connect(self) -> bool:
        """Establish connection to MySQL database.

        Returns True on success; on any failure (missing driver or
        connection error) logs the problem and returns False.
        """
        try:
            # Lazy import: keeps the module importable without the driver.
            import mysql.connector
        except ImportError as e:
            logger.error(f"mysql-connector-python is not installed: {e}")
            return False
        try:
            self.connection = mysql.connector.connect(**self.connection_config)
            logger.info("Successfully connected to MySQL database")
            return True
        except mysql.connector.Error as e:
            logger.error(f"Error connecting to MySQL: {e}")
            return False

    def disconnect(self):
        """Close the MySQL connection if one is open (safe to call anytime)."""
        if self.connection and self.connection.is_connected():
            self.connection.close()
            logger.info("MySQL connection closed")

    @staticmethod
    def _quote_identifier(name: str) -> str:
        """Backtick-quote a MySQL identifier.

        Table names are interpolated into DESCRIBE/SELECT statements (they
        cannot be bound as query parameters), so quoting protects against
        reserved words and special characters; embedded backticks are
        doubled per MySQL quoting rules.
        """
        return "`" + name.replace("`", "``") + "`"

    def get_tables(self) -> List[str]:
        """Get list of all tables in the database."""
        if not self.connection:
            raise ConnectionError("Not connected to database")

        cursor = self.connection.cursor()
        cursor.execute("SHOW TABLES")
        tables = [table[0] for table in cursor.fetchall()]
        cursor.close()
        return tables

    def get_table_schema(self, table_name: str) -> List[Dict[str, Any]]:
        """Return DESCRIBE output as a list of column dicts.

        Keys: field, type, null, key, default, extra (DESCRIBE column order).
        """
        if not self.connection:
            raise ConnectionError("Not connected to database")

        cursor = self.connection.cursor()
        cursor.execute(f"DESCRIBE {self._quote_identifier(table_name)}")
        columns = []
        for row in cursor.fetchall():
            columns.append(
                {
                    "field": row[0],
                    "type": row[1],
                    "null": row[2],
                    "key": row[3],
                    "default": row[4],
                    "extra": row[5],
                }
            )
        cursor.close()
        return columns

    def get_foreign_keys(self, table_name: str) -> List[Dict[str, str]]:
        """Get foreign key relationships for a table via INFORMATION_SCHEMA."""
        if not self.connection:
            raise ConnectionError("Not connected to database")

        cursor = self.connection.cursor()
        # Schema and table names are bound as parameters here (this is a
        # plain WHERE clause, unlike the identifier positions above).
        query = """
            SELECT
                COLUMN_NAME,
                REFERENCED_TABLE_NAME,
                REFERENCED_COLUMN_NAME
            FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
            WHERE TABLE_SCHEMA = %s
                AND TABLE_NAME = %s
                AND REFERENCED_TABLE_NAME IS NOT NULL
        """
        cursor.execute(query, (self.connection_config["database"], table_name))

        foreign_keys = []
        for row in cursor.fetchall():
            foreign_keys.append(
                {
                    "column": row[0],
                    "referenced_table": row[1],
                    "referenced_column": row[2],
                }
            )
        cursor.close()
        return foreign_keys

    def get_table_data(
        self, table_name: str, limit: Optional[int] = None
    ) -> List[Dict[str, Any]]:
        """Get data from a table as a list of dicts (optionally limited)."""
        if not self.connection:
            raise ConnectionError("Not connected to database")

        cursor = self.connection.cursor(dictionary=True)
        query = f"SELECT * FROM {self._quote_identifier(table_name)}"
        if limit:
            # int() guards against non-integer limit values being
            # interpolated into the statement.
            query += f" LIMIT {int(limit)}"

        cursor.execute(query)
        data = cursor.fetchall()
        cursor.close()
        return data

    def get_database_structure(self) -> Dict[str, Any]:
        """Get complete database structure: tables, schemas and relationships.

        Returns {"tables": {name: {"schema": [...], "foreign_keys": [...]}},
        "relationships": [{"from_table", "from_column", "to_table",
        "to_column"}, ...]}.
        """
        structure = {"tables": {}, "relationships": []}

        tables = self.get_tables()

        for table in tables:
            structure["tables"][table] = {
                "schema": self.get_table_schema(table),
                "foreign_keys": self.get_foreign_keys(table),
            }

            # Flatten each table's foreign keys into a global relationship list
            for fk in structure["tables"][table]["foreign_keys"]:
                structure["relationships"].append(
                    {
                        "from_table": table,
                        "from_column": fk["column"],
                        "to_table": fk["referenced_table"],
                        "to_column": fk["referenced_column"],
                    }
                )

        return structure

    def get_table_row_count(self, table_name: str) -> int:
        """Get the number of rows in a table."""
        if not self.connection:
            raise ConnectionError("Not connected to database")

        cursor = self.connection.cursor()
        cursor.execute(f"SELECT COUNT(*) FROM {self._quote_identifier(table_name)}")
        count = cursor.fetchone()[0]
        cursor.close()
        return count
+""" + +import os +import sys +import logging +from typing import Dict, List, Any, TypedDict +from pathlib import Path + +# Add parent directories to path for imports +sys.path.append(str(Path(__file__).parent.parent.parent / "memgraph-toolbox" / "src")) +sys.path.append(str(Path(__file__).parent.parent / "langchain-memgraph")) + +from langgraph.graph import StateGraph, END +from langchain_openai import ChatOpenAI +from langchain_core.messages import HumanMessage, SystemMessage +from dotenv import load_dotenv + +from database_analyzer import MySQLAnalyzer +from cypher_generator import CypherGenerator +from memgraph_toolbox.api.memgraph import Memgraph + +# Load environment variables +load_dotenv() + +# Setup logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class MigrationState(TypedDict): + """State for the migration workflow.""" + + mysql_config: Dict[str, str] + memgraph_config: Dict[str, str] + database_structure: Dict[str, Any] + migration_queries: List[str] + migration_plan: str + current_step: str + errors: List[str] + completed_tables: List[str] + total_tables: int + + +class MySQLToMemgraphAgent: + """Agent for migrating MySQL databases to Memgraph.""" + + def __init__(self): + """Initialize the migration agent.""" + self.llm = ChatOpenAI( + model="gpt-4o-mini", temperature=0.1, api_key=os.getenv("OPENAI_API_KEY") + ) + self.mysql_analyzer = None + self.cypher_generator = CypherGenerator() + self.memgraph_client = None + + # Build the workflow graph + self.workflow = self._build_workflow() + + def _build_workflow(self) -> StateGraph: + """Build the LangGraph workflow.""" + workflow = StateGraph(MigrationState) + + # Add nodes + workflow.add_node("analyze_mysql", self._analyze_mysql_schema) + workflow.add_node("generate_migration_plan", self._generate_migration_plan) + workflow.add_node("generate_cypher_queries", self._generate_cypher_queries) + workflow.add_node("validate_queries", self._validate_queries) + 
workflow.add_node("execute_migration", self._execute_migration) + workflow.add_node("verify_migration", self._verify_migration) + + # Add edges + workflow.add_edge("analyze_mysql", "generate_migration_plan") + workflow.add_edge("generate_migration_plan", "generate_cypher_queries") + workflow.add_edge("generate_cypher_queries", "validate_queries") + workflow.add_edge("validate_queries", "execute_migration") + workflow.add_edge("execute_migration", "verify_migration") + workflow.add_edge("verify_migration", END) + + # Set entry point + workflow.set_entry_point("analyze_mysql") + + return workflow.compile() + + def _analyze_mysql_schema(self, state: MigrationState) -> MigrationState: + """Analyze MySQL database schema and structure.""" + logger.info("Analyzing MySQL database schema...") + + try: + # Initialize MySQL analyzer + self.mysql_analyzer = MySQLAnalyzer(**state["mysql_config"]) + + if not self.mysql_analyzer.connect(): + raise Exception("Failed to connect to MySQL database") + + # Get database structure + structure = self.mysql_analyzer.get_database_structure() + + # Add table counts for progress tracking + structure["table_counts"] = {} + for table_name in structure["tables"].keys(): + count = self.mysql_analyzer.get_table_row_count(table_name) + structure["table_counts"][table_name] = count + + state["database_structure"] = structure + state["total_tables"] = len(structure["tables"]) + state["current_step"] = "Schema analysis completed" + + logger.info( + f"Found {len(structure['tables'])} tables and " + f"{len(structure['relationships'])} relationships" + ) + + except Exception as e: + logger.error(f"Error analyzing MySQL schema: {e}") + state["errors"].append(f"Schema analysis failed: {e}") + + return state + + def _generate_migration_plan(self, state: MigrationState) -> MigrationState: + """Generate a migration plan using LLM.""" + logger.info("Generating migration plan...") + + try: + structure = state["database_structure"] + + # Prepare context for LLM 
+ context = { + "tables": list(structure["tables"].keys()), + "relationships": structure["relationships"], + "table_counts": structure.get("table_counts", {}), + } + + system_message = SystemMessage( + content=""" +You are an expert database migration specialist. You need to create a +detailed migration plan for moving data from MySQL to Memgraph (a graph database). + +Your task is to: +1. Analyze the database structure +2. Identify the optimal order for creating nodes and relationships +3. Consider dependencies between tables +4. Suggest any optimizations for graph modeling +5. Identify potential issues or challenges + +Provide a detailed, step-by-step migration plan. + """ + ) + + human_message = HumanMessage( + content=f""" +Create a migration plan for the following MySQL database structure: + +Tables: {context['tables']} +Relationships: {context['relationships']} +Table row counts: {context['table_counts']} + +Please provide a detailed migration plan including: +1. Order of operations +2. Node creation strategy +3. Relationship creation strategy +4. Any potential issues to watch for +5. 
Estimated timeline considerations + """ + ) + + response = self.llm.invoke([system_message, human_message]) + state["migration_plan"] = response.content + state["current_step"] = "Migration plan generated" + + except Exception as e: + logger.error(f"Error generating migration plan: {e}") + state["errors"].append(f"Migration plan generation failed: {e}") + + return state + + def _generate_cypher_queries(self, state: MigrationState) -> MigrationState: + """Generate Cypher queries for the migration.""" + logger.info("Generating Cypher queries...") + + try: + structure = state["database_structure"] + queries = self.cypher_generator.generate_full_migration_script(structure) + + state["migration_queries"] = queries + state["current_step"] = "Cypher queries generated" + + logger.info(f"Generated {len(queries)} migration queries") + + except Exception as e: + logger.error(f"Error generating Cypher queries: {e}") + state["errors"].append(f"Query generation failed: {e}") + + return state + + def _validate_queries(self, state: MigrationState) -> MigrationState: + """Validate generated Cypher queries.""" + logger.info("Validating Cypher queries...") + + try: + # Initialize Memgraph connection for validation + self.memgraph_client = Memgraph(**state["memgraph_config"]) + + # Test connection + test_query = "MATCH (n) RETURN count(n) as node_count LIMIT 1" + self.memgraph_client.query(test_query) + + state["current_step"] = "Queries validated successfully" + logger.info("Memgraph connection established and queries validated") + + except Exception as e: + logger.error(f"Error validating queries: {e}") + state["errors"].append(f"Query validation failed: {e}") + + return state + + def _execute_migration(self, state: MigrationState) -> MigrationState: + """Execute the migration queries.""" + logger.info("Executing migration...") + + try: + structure = state["database_structure"] + queries = state["migration_queries"] + + # Execute constraint and index creation queries first + 
constraint_queries = [ + q for q in queries if "CONSTRAINT" in q or "INDEX" in q + ] + for query in constraint_queries: + if query.strip() and not query.startswith("//"): + try: + self.memgraph_client.query(query) + logger.info(f"Executed: {query[:50]}...") + except Exception as e: + logger.warning(f"Constraint/Index creation failed: {e}") + + # Migrate data for each table + for table_name, table_info in structure["tables"].items(): + logger.info(f"Migrating table: {table_name}") + + # Get data from MySQL + data = self.mysql_analyzer.get_table_data(table_name) + + if data: + # Prepare data for Cypher + prepared_data = self.cypher_generator.prepare_data_for_cypher( + data, table_info["schema"] + ) + + # Find the node creation query for this table + node_query = None + for query in queries: + if ( + f"Create {self.cypher_generator._table_name_to_label(table_name)} nodes" + in query + ): + node_query = query + break + + if node_query: + # Execute the query with data + try: + # Clean the query (remove comments) + clean_query = "\n".join( + [ + line + for line in node_query.split("\n") + if not line.strip().startswith("//") + ] + ).strip() + + self.memgraph_client.query( + clean_query, {"data": prepared_data} + ) + state["completed_tables"].append(table_name) + logger.info( + f"Successfully migrated {len(data)} rows from {table_name}" + ) + except Exception as e: + logger.error(f"Failed to migrate table {table_name}: {e}") + state["errors"].append( + f"Table migration failed for {table_name}: {e}" + ) + else: + logger.info(f"No data found in table {table_name}") + + # Create relationships + logger.info("Creating relationships...") + relationship_queries = [ + q for q in queries if "CREATE (" in q and ")-[:" in q + ] + for query in relationship_queries: + if query.strip() and not query.startswith("//"): + try: + clean_query = "\n".join( + [ + line + for line in query.split("\n") + if not line.strip().startswith("//") + ] + ).strip() + + 
self.memgraph_client.query(clean_query) + logger.info(f"Created relationships: {query[:50]}...") + except Exception as e: + logger.warning(f"Relationship creation failed: {e}") + + state["current_step"] = "Migration execution completed" + + except Exception as e: + logger.error(f"Error executing migration: {e}") + state["errors"].append(f"Migration execution failed: {e}") + + return state + + def _verify_migration(self, state: MigrationState) -> MigrationState: + """Verify the migration results.""" + logger.info("Verifying migration results...") + + try: + # Count nodes and relationships in Memgraph + node_count_query = "MATCH (n) RETURN count(n) as node_count" + relationship_count_query = "MATCH ()-[r]->() RETURN count(r) as rel_count" + + node_result = self.memgraph_client.query(node_count_query) + rel_result = self.memgraph_client.query(relationship_count_query) + + node_count = node_result[0]["node_count"] if node_result else 0 + rel_count = rel_result[0]["rel_count"] if rel_result else 0 + + # Calculate expected counts from MySQL + structure = state["database_structure"] + expected_nodes = sum(structure.get("table_counts", {}).values()) + + logger.info(f"Migration verification:") + logger.info(f" - Nodes created: {node_count} (expected: {expected_nodes})") + logger.info(f" - Relationships created: {rel_count}") + logger.info( + f" - Tables migrated: {len(state['completed_tables'])}/{state['total_tables']}" + ) + + state["current_step"] = "Migration verification completed" + + except Exception as e: + logger.error(f"Error verifying migration: {e}") + state["errors"].append(f"Migration verification failed: {e}") + + return state + + def migrate( + self, mysql_config: Dict[str, str], memgraph_config: Dict[str, str] = None + ) -> Dict[str, Any]: + """Execute the complete migration workflow.""" + logger.info("Starting MySQL to Memgraph migration...") + + # Default Memgraph configuration + if not memgraph_config: + memgraph_config = { + "url": 
os.getenv("MEMGRAPH_URL", "bolt://localhost:7687"), + "username": os.getenv("MEMGRAPH_USER", ""), + "password": os.getenv("MEMGRAPH_PASSWORD", ""), + "database": os.getenv("MEMGRAPH_DATABASE", "memgraph"), + } + + # Initialize state + initial_state = MigrationState( + mysql_config=mysql_config, + memgraph_config=memgraph_config, + database_structure={}, + migration_queries=[], + migration_plan="", + current_step="Starting migration", + errors=[], + completed_tables=[], + total_tables=0, + ) + + try: + # Execute workflow + final_state = self.workflow.invoke(initial_state) + + # Cleanup connections + if self.mysql_analyzer: + self.mysql_analyzer.disconnect() + if self.memgraph_client: + self.memgraph_client.close() + + return { + "success": len(final_state["errors"]) == 0, + "migration_plan": final_state["migration_plan"], + "completed_tables": final_state["completed_tables"], + "total_tables": final_state["total_tables"], + "errors": final_state["errors"], + "final_step": final_state["current_step"], + } + + except Exception as e: + logger.error(f"Migration workflow failed: {e}") + return { + "success": False, + "errors": [f"Workflow execution failed: {e}"], + "migration_plan": "", + "completed_tables": [], + "total_tables": 0, + "final_step": "Failed", + } + + +def main(): + """Main function to run the migration agent.""" + + # Example configuration for Sakila database + mysql_config = { + "host": os.getenv("MYSQL_HOST", "localhost"), + "user": os.getenv("MYSQL_USER", "root"), + "password": os.getenv("MYSQL_PASSWORD", "password"), + "database": os.getenv("MYSQL_DATABASE", "sakila"), + "port": int(os.getenv("MYSQL_PORT", "3306")), + } + + print("MySQL to Memgraph Migration Agent") + print("=" * 40) + + # Create and run the agent + agent = MySQLToMemgraphAgent() + result = agent.migrate(mysql_config) + + print(f"\nMigration Result:") + print(f"Success: {result['success']}") + print( + f"Completed Tables: {len(result['completed_tables'])}/{result['total_tables']}" + ) 
def check_environment_variables():
    """Report which MYSQL_* environment variables are set.

    Host, user, database and port fall back to documented defaults;
    MYSQL_PASSWORD has no default and must be provided explicitly.

    Returns:
        bool: True when every variable resolves to a non-empty value,
        False otherwise (after printing which variables are missing).
    """
    print("\nChecking environment variables...")

    env_vars = {
        "MYSQL_HOST": os.getenv("MYSQL_HOST", "localhost"),
        "MYSQL_USER": os.getenv("MYSQL_USER", "root"),
        "MYSQL_PASSWORD": os.getenv("MYSQL_PASSWORD"),
        "MYSQL_DATABASE": os.getenv("MYSQL_DATABASE", "sakila"),
        "MYSQL_PORT": os.getenv("MYSQL_PORT", "3306"),
    }

    missing_vars = []
    for var, value in env_vars.items():
        if not value:  # treats both None (unset) and "" as missing
            missing_vars.append(var)
            print(f"✗ {var}: Not set")
        else:
            # Fixed-width mask: the previous "*" * len(value) leaked the
            # password's length to the console/logs.
            display_value = "********" if "PASSWORD" in var else value
            print(f"✓ {var}: {display_value}")

    if missing_vars:
        print(f"\nMissing variables: {', '.join(missing_vars)}")
        print("Please set these in your .env file")
        return False
    return True
def test_sakila_database():
    """Test connection to Sakila database specifically.

    Connects using the MYSQL_* environment variables, checks that the
    well-known Sakila tables exist, and counts rows in ``actor``.

    Returns:
        bool: True when the database is reachable and looks like a
        properly imported Sakila dataset, False on any failure.
    """
    print("\nTesting Sakila database connection...")

    connection = None
    cursor = None
    try:
        import mysql.connector

        config = {
            "host": os.getenv("MYSQL_HOST", "localhost"),
            "user": os.getenv("MYSQL_USER", "root"),
            "password": os.getenv("MYSQL_PASSWORD"),
            "database": os.getenv("MYSQL_DATABASE", "sakila"),
            "port": int(os.getenv("MYSQL_PORT", "3306")),
        }

        connection = mysql.connector.connect(**config)
        cursor = connection.cursor()

        # Check if Sakila tables exist
        cursor.execute("SHOW TABLES")
        tables = [table[0] for table in cursor.fetchall()]

        expected_tables = ["actor", "film", "customer", "rental", "inventory"]
        found_tables = [t for t in expected_tables if t in tables]

        print(f"✓ Connected to {config['database']} database")
        print(f"✓ Found {len(tables)} tables total")
        print(f"✓ Sakila tables found: {found_tables}")

        if len(found_tables) < 3:
            print(
                "⚠️ Warning: Few Sakila tables found. Database might not be properly imported."
            )
            print("Consider importing Sakila schema and data:")
            print("1. Download from: https://dev.mysql.com/doc/index-other.html")
            print("2. Import: mysql -u root -p < sakila-schema.sql")
            print("3. Import: mysql -u root -p < sakila-data.sql")

        # Test a simple query
        cursor.execute("SELECT COUNT(*) FROM actor")
        actor_count = cursor.fetchone()[0]
        print(f"✓ Actor table has {actor_count} records")

        return True

    except Exception as e:
        print(f"✗ Sakila database test failed: {e}")
        return False

    finally:
        # Release DB resources on every path; the original leaked the
        # open connection/cursor whenever a query above raised.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def main():
    """Run the MySQL troubleshooting checks in order, stopping at the
    first failure and printing setup instructions when anything fails."""
    print("MySQL Connection Troubleshooting")
    print("=" * 40)

    checks = (
        ("MySQL Import Test", test_mysql_import),
        ("Environment Variables Check", check_environment_variables),
        ("Basic Connection Test", test_basic_connection),
        ("Sakila Database Test", test_sakila_database),
        ("Database Analyzer Test", test_database_analyzer),
    )

    total = len(checks)
    passed = 0

    for title, check in checks:
        print(f"\n{title}")
        print("-" * len(title))
        if not check():
            # A failed check means the later ones cannot succeed either;
            # stop here so the user sees the relevant advice first.
            break
        passed += 1

    print("\n" + "=" * 40)
    print(f"Tests passed: {passed}/{total}")

    if passed == total:
        print("🎉 All MySQL tests passed! Your setup is ready.")
    else:
        print("⚠️ Some tests failed. See troubleshooting advice above.")
        provide_setup_instructions()
python_full_version < '3.13'", "python_full_version < '3.12.4'", ] [manifest] members = [ + "agents", "langchain-memgraph", "mcp-memgraph", "memgraph-ai", "memgraph-toolbox", ] +[[package]] +name = "agents" +version = "0.1.0" +source = { editable = "integrations/agents" } +dependencies = [ + { name = "langchain" }, + { name = "langchain-core" }, + { name = "langchain-openai" }, + { name = "langgraph" }, + { name = "mysql-connector-python" }, + { name = "neo4j" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "pymysql" }, + { name = "python-dotenv" }, + { name = "sqlalchemy" }, +] + +[package.metadata] +requires-dist = [ + { name = "langchain", specifier = ">=0.3.0" }, + { name = "langchain-core", specifier = ">=0.3.0" }, + { name = "langchain-openai", specifier = ">=0.2.0" }, + { name = "langgraph", specifier = ">=0.2.0" }, + { name = "mysql-connector-python", specifier = ">=9.0.0" }, + { name = "neo4j", specifier = ">=5.0.0" }, + { name = "openai", specifier = ">=1.0.0" }, + { name = "pydantic", specifier = ">=2.0.0" }, + { name = "pymysql", specifier = ">=1.1.0" }, + { name = "python-dotenv", specifier = ">=1.0.0" }, + { name = "sqlalchemy", specifier = ">=2.0.0" }, +] + +[package.metadata.requires-dev] +dev = [] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -620,9 +658,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595 }, ] +[[package]] +name = "jsonschema" +version = "4.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = 
"sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437 }, +] + [[package]] name = "langchain" -version = "0.3.23" +version = "0.3.26" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11'" }, @@ -634,14 +699,14 @@ dependencies = [ { name = "requests" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/47/ea/b0de568ca17614d5c00275c4ca506af4139cc7c51d0418802b2447055c00/langchain-0.3.23.tar.gz", hash = "sha256:d95004afe8abebb52d51d6026270248da3f4b53d93e9bf699f76005e0c83ad34", size = 10225576 } +sdist = { url = "https://files.pythonhosted.org/packages/7f/13/a9931800ee42bbe0f8850dd540de14e80dda4945e7ee36e20b5d5964286e/langchain-0.3.26.tar.gz", hash = "sha256:8ff034ee0556d3e45eff1f1e96d0d745ced57858414dba7171c8ebdbeb5580c9", size = 10226808 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d4/49/6e933837da1931c9db745967282ff8bfff51bc3faec0eade846b12203b75/langchain-0.3.23-py3-none-any.whl", hash = "sha256:084f05ee7e80b7c3f378ebadd7309f2a37868ce2906fa0ae64365a67843ade3d", size = 1011778 }, + { url = "https://files.pythonhosted.org/packages/f1/f2/c09a2e383283e3af1db669ab037ac05a45814f4b9c472c48dc24c0cef039/langchain-0.3.26-py3-none-any.whl", hash = "sha256:361bb2e61371024a8c473da9f9c55f4ee50f269c5ab43afdb2b1309cb7ac36cf", size = 1012336 }, ] [[package]] name = "langchain-community" -version = "0.3.21" +version = "0.3.27" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -657,14 +722,14 @@ dependencies = [ { name = "sqlalchemy" }, { name = "tenacity" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/be/5288a737069570741d46390028b4e8518354329345294ca89fcb2d44a9c1/langchain_community-0.3.21.tar.gz", hash = "sha256:b87b9992cbeea7553ed93e3d39faf9893a8690318485f7dc861751c7878729f7", size = 33226597 } +sdist = { url = "https://files.pythonhosted.org/packages/5c/76/200494f6de488217a196c4369e665d26b94c8c3642d46e2fd62f9daf0a3a/langchain_community-0.3.27.tar.gz", hash = "sha256:e1037c3b9da0c6d10bf06e838b034eb741e016515c79ef8f3f16e53ead33d882", size = 33237737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/72/4046a132a180b569265bc8aa7ecd6f958f6c11085bdf68c7e1bbe52f1907/langchain_community-0.3.21-py3-none-any.whl", hash = "sha256:8cb9bbb7ef15e5eea776193528dd0e0e1299047146d0c78b6c696ae2dc62e81f", size = 2526687 }, + { url = "https://files.pythonhosted.org/packages/c8/bc/f8c7dae8321d37ed39ac9d7896617c4203248240a4835b136e3724b3bb62/langchain_community-0.3.27-py3-none-any.whl", hash = "sha256:581f97b795f9633da738ea95da9cb78f8879b538090c9b7a68c0aed49c828f0d", size = 2530442 }, ] [[package]] name = "langchain-core" -version = "0.3.51" +version = "0.3.68" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpatch" }, @@ -675,9 
+740,9 @@ dependencies = [ { name = "tenacity" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/24/74dfce829f63aaf09885ae569121335a62ecfa5043a35d9e819cd0e046f0/langchain_core-0.3.51.tar.gz", hash = "sha256:db76b9cc331411602cb40ba0469a161febe7a0663fbcaddbc9056046ac2d22f4", size = 542003 } +sdist = { url = "https://files.pythonhosted.org/packages/23/20/f5b18a17bfbe3416177e702ab2fd230b7d168abb17be31fb48f43f0bb772/langchain_core-0.3.68.tar.gz", hash = "sha256:312e1932ac9aa2eaf111b70fdc171776fa571d1a86c1f873dcac88a094b19c6f", size = 563041 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/bf/3464d759bf8687a3bbdfeb9af2f2aeb0a265c6d5ef5fd9274c2a70449f77/langchain_core-0.3.51-py3-none-any.whl", hash = "sha256:4bd71e8acd45362aa428953f2a91d8162318014544a2216e4b769463caf68e13", size = 423303 }, + { url = "https://files.pythonhosted.org/packages/f9/da/c89be0a272993bfcb762b2a356b9f55de507784c2755ad63caec25d183bf/langchain_core-0.3.68-py3-none-any.whl", hash = "sha256:5e5c1fbef419590537c91b8c2d86af896fbcbaf0d5ed7fdcdd77f7d8f3467ba0", size = 441405 }, ] [[package]] @@ -695,9 +760,10 @@ wheels = [ [[package]] name = "langchain-memgraph" -version = "0.1.2" +version = "0.1.5" source = { editable = "integrations/langchain-memgraph" } dependencies = [ + { name = "langchain" }, { name = "langchain-core" }, { name = "memgraph-toolbox" }, { name = "neo4j" }, @@ -719,13 +785,14 @@ test = [ [package.metadata] requires-dist = [ + { name = "langchain", specifier = ">=0.3.25" }, { name = "langchain-core", specifier = ">=0.3.15" }, - { name = "langchain-core", marker = "extra == 'test'", specifier = ">=0.3.51" }, + { name = "langchain-core", marker = "extra == 'test'", specifier = ">=0.3.67" }, { name = "langchain-experimental", marker = "extra == 'test'", specifier = ">=0.3.4" }, { name = "langchain-openai", marker = "extra == 'test'", specifier = ">=0.3.12" }, { name = "langchain-tests", marker = "extra == 'test'", specifier = 
">=0.3.17" }, { name = "langgraph", marker = "extra == 'test'", specifier = ">=0.3.11" }, - { name = "memgraph-toolbox" }, + { name = "memgraph-toolbox", specifier = ">=0.1.2" }, { name = "neo4j", specifier = ">=5.28.1" }, { name = "pytest", marker = "extra == 'test'", specifier = ">=8.3.5" }, { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=0.26.0" }, @@ -836,7 +903,7 @@ wheels = [ [[package]] name = "langsmith" -version = "0.3.26" +version = "0.4.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -847,9 +914,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/be/2e/9748883c40e8a3c5d628d5a81a531e1a5ae3a2bfced429ffdf86a437b269/langsmith-0.3.26.tar.gz", hash = "sha256:3bd5b952a5fc82d69b0e2c030e502ee081a8ccf20468e96fd3d53e1572aef6fc", size = 342721 } +sdist = { url = "https://files.pythonhosted.org/packages/5c/92/7885823f3d13222f57773921f0da19b37d628c64607491233dc853a0f6ea/langsmith-0.4.5.tar.gz", hash = "sha256:49444bd8ccd4e46402f1b9ff1d686fa8e3a31b175e7085e72175ab8ec6164a34", size = 352235 } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/14/f2e972ac0cf9b4ff4d405f7843b0f14a1ef686544a54f91ac4d5ac723140/langsmith-0.3.26-py3-none-any.whl", hash = "sha256:3ae49e49d6f3c980a524d15ac2fd895896e709ecedc83ac150c38e1ead776e1b", size = 357325 }, + { url = "https://files.pythonhosted.org/packages/c8/10/ad3107b666c3203b7938d10ea6b8746b9735c399cf737a51386d58e41d34/langsmith-0.4.5-py3-none-any.whl", hash = "sha256:4167717a2cccc4dff5809dbddc439628e836f6fd13d4fdb31ea013bc8d5cfaf5", size = 367795 }, ] [[package]] @@ -878,21 +945,24 @@ wheels = [ [[package]] name = "mcp" -version = "1.6.0" +version = "1.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, + { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, + { 
name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "sse-starlette" }, { name = "starlette" }, - { name = "uvicorn" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/f5/9506eb5578d5bbe9819ee8ba3198d0ad0e2fbe3bab8b257e4131ceb7dfb6/mcp-1.11.0.tar.gz", hash = "sha256:49a213df56bb9472ff83b3132a4825f5c8f5b120a90246f08b0dac6bedac44c8", size = 406907 } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077 }, + { url = "https://files.pythonhosted.org/packages/92/9c/c9ca79f9c512e4113a5d07043013110bb3369fc7770040c61378c7fbcf70/mcp-1.11.0-py3-none-any.whl", hash = "sha256:58deac37f7483e4b338524b98bc949b7c2b7c33d978f5fafab5bde041c5e2595", size = 155880 }, ] [package.optional-dependencies] @@ -903,7 +973,7 @@ cli = [ [[package]] name = "mcp-memgraph" -version = "0.1.0" +version = "0.1.3" source = { editable = "integrations/mcp-memgraph" } dependencies = [ { name = "httpx" }, @@ -925,9 +995,9 @@ test = [ requires-dist = [ { name = "anthropic", marker = "extra == 'test'" }, { name = "httpx", specifier = ">=0.28.1" }, - { name = "mcp", extras = ["cli"], specifier = ">=1.3.0" }, + { name = "mcp", extras = ["cli"], specifier = ">=1.9.3" }, { name = "mcp", extras = ["cli"], marker = "extra == 'test'", specifier = ">=1.3.0" }, - { name = "memgraph-toolbox" }, + { name = "memgraph-toolbox", specifier = ">=0.1.2" }, { name = "neo4j", specifier = ">=5.28.1" }, { name = "pytest", marker = "extra == 'test'", specifier = ">=8.3.5" }, 
{ name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=0.20.3" }, @@ -946,7 +1016,7 @@ wheels = [ [[package]] name = "memgraph-ai" -version = "0.1.0" +version = "0.1.2" source = { virtual = "." } dependencies = [ { name = "mcp", extra = ["cli"] }, @@ -963,7 +1033,7 @@ requires-dist = [ [[package]] name = "memgraph-toolbox" -version = "0.1.0" +version = "0.1.3" source = { editable = "memgraph-toolbox" } dependencies = [ { name = "neo4j" }, @@ -1079,6 +1149,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, ] +[[package]] +name = "mysql-connector-python" +version = "9.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/5e/55b265cb95938e271208e5692d7e615c53f2aeea894ab72a9f14ab198e9a/mysql-connector-python-9.3.0.tar.gz", hash = "sha256:8b16d51447e3603f18478fb5a19b333bfb73fb58f872eb055a105635f53d2345", size = 942579 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/f8/b36f551601a4b942e2014f80a0bfa5f2f0da30ef2710182cc96d875a5852/mysql_connector_python-9.3.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:f979e712187796ad57cd0bef76666dd48ed4887104775833c9489ea837144ad8", size = 15148231 }, + { url = "https://files.pythonhosted.org/packages/41/ae/abd18c61277ec9e00c36de6a4f53f84003ae9fc34ca6077241a19e2c440f/mysql_connector_python-9.3.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:ee1a901c287471013570e29cdf5ca7159898af31cf3a582180eadd41c96b42c9", size = 15964353 }, + { url = "https://files.pythonhosted.org/packages/0a/98/ce72b24c53327dbe0a2520f8a0828a18726bcb8e4f2012b274a4507bbed3/mysql_connector_python-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5508ff6b79d8d46b15791401784a1b5abd10c8e05aec2684c4a50e92c5893cd2", size = 33449033 }, + { 
url = "https://files.pythonhosted.org/packages/a2/5f/10a89734281ac9d74c7e3bc44f42dbf2105709435ea1bebfbc71e214af18/mysql_connector_python-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d47a0d5b2b9b02f06647d5d7bbb19e237f234d6be91d0e0c935629faacf0797f", size = 33847325 }, + { url = "https://files.pythonhosted.org/packages/58/53/a04fc2186f90fdd2a52d02856f15f2c3c894215799bdaeb313899e75a27b/mysql_connector_python-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:e24be22a5d96f3535afa5dd331166b02bf72655ea6ed6a2a0eb548c313548788", size = 16359157 }, + { url = "https://files.pythonhosted.org/packages/65/59/fa9bef2d9a7eafdc5629b82916e4e1e29446c9bbb0b33706988bbf541b18/mysql_connector_python-9.3.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:e8b0131006608e533b8eab20078f9e65486068c984ed3efd28413d350d241f44", size = 15148256 }, + { url = "https://files.pythonhosted.org/packages/14/ae/4ac81d7dc2ce8dff22fd63fa16d4562b113ef0458b04bd958675da3adc74/mysql_connector_python-9.3.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cb72fcda90b616f0b2d3dae257441e06e8896b2780c3dddc6a65275ec1408d9a", size = 15964339 }, + { url = "https://files.pythonhosted.org/packages/88/f4/088022373f0b71aae6f3190278423fce1fe0c31ecbddf33eb5c0cbf87c4d/mysql_connector_python-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9cc8d3c2f45d16b064b0063db857f8a7187b8659253dd32e3f19df1bf1d55ea0", size = 33456359 }, + { url = "https://files.pythonhosted.org/packages/b9/38/96a602ad402fb71175d83bed3178bd8c16e04251d279e314e0bc53e0b861/mysql_connector_python-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9c898c5f3e34314ed825f2ffdd52d674e03d59c45d02ac8083a8ec5173c1e0f8", size = 33852738 }, + { url = "https://files.pythonhosted.org/packages/ec/55/63567fa4082aa22bad5cecaf16fe3604f026aea40b06d0bf2a9fd75212ff/mysql_connector_python-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:f10fe89397e8da81026d8143e17fc5c12ae5e66e51753a0f49e1db179c4f7113", size = 16358431 }, + { url = 
"https://files.pythonhosted.org/packages/bf/73/b42061ea4c0500edad4f92834ed7d75b1a740d11970e531c5be4dc1af5cd/mysql_connector_python-9.3.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2589af070babdff9c920ee37f929218d80afa704f4e2a99f1ddcb13d19de4450", size = 15151288 }, + { url = "https://files.pythonhosted.org/packages/27/87/9cd7e803c762c5098683c83837d2258c2f83cf82d33fabd1d0eaadae06ee/mysql_connector_python-9.3.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:1916256ecd039f4673715550d28138416bac5962335e06d36f7434c47feb5232", size = 15967397 }, + { url = "https://files.pythonhosted.org/packages/5a/5d/cd63f31bf5d0536ee1e4216fb2f3f57175ca1e0dd37e1e8139083d2156e8/mysql_connector_python-9.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d33e2f88e1d4b15844cfed2bb6e90612525ba2c1af2fb10b4a25b2c89a1fe49a", size = 33457025 }, + { url = "https://files.pythonhosted.org/packages/76/65/9609a96edc0d015d1017176974c42b955cf87ba92cd31765f99cba835715/mysql_connector_python-9.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0aedee809e1f8dbab6b2732f51ee1619b54a56d15b9070655bc31fb822c1a015", size = 33853427 }, + { url = "https://files.pythonhosted.org/packages/c2/da/f81eeb5b63dea3ebe035fbbbdc036ae517155ad73f2e9640ee7c9eace09d/mysql_connector_python-9.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:3853799f4b719357ea25eba05f5f278a158a85a5c8209b3d058947a948bc9262", size = 16358560 }, + { url = "https://files.pythonhosted.org/packages/6a/16/5762061505a0d0d3a333613b6f5d7b8eb3222a689aa32f71ed15f1532ad1/mysql_connector_python-9.3.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9516a4cdbaee3c9200f0e7d9aafb31057692f45c202cdcb43a3f9b37c94e7c84", size = 15151425 }, + { url = "https://files.pythonhosted.org/packages/db/40/22de86e966e648ea0e3e438ad523c86d0cf4866b3841e248726fb4afded8/mysql_connector_python-9.3.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:495798dd34445d749991fb3a2aa87b4205100676939556d8d4aab5d5558e7a1f", size = 15967663 }, + { url = 
"https://files.pythonhosted.org/packages/4c/19/36983937347b6a58af546950c88a9403cdce944893850e80ffb7f602a099/mysql_connector_python-9.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:be0ef15f6023ae2037347498f005a4471f694f8a6b8384c3194895e153120286", size = 33457288 }, + { url = "https://files.pythonhosted.org/packages/18/12/7ccbc678a130df0f751596b37eddb98b2e40930d0ebc9ee41965ffbf0b92/mysql_connector_python-9.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4364d3a37c449f1c0bb9e52fd4eddc620126b9897b6b9f2fd1b3f33dacc16356", size = 33853838 }, + { url = "https://files.pythonhosted.org/packages/c2/5e/c361caa024ce14ffc1f5b153d90f0febf5e9483a60c4b5c84e1e012363cc/mysql_connector_python-9.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:2a5de57814217077a8672063167b616b1034a37b614b93abcb602cc0b8c6fade", size = 16358561 }, + { url = "https://files.pythonhosted.org/packages/23/1d/8c2c6672094b538f4881f7714e5332fdcddd05a7e196cbc9eb4a9b5e9a45/mysql_connector_python-9.3.0-py2.py3-none-any.whl", hash = "sha256:8ab7719d614cf5463521082fab86afc21ada504b538166090e00eeaa1ff729bc", size = 399302 }, +] + [[package]] name = "neo4j" version = "5.28.1" @@ -1514,6 +1613,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, ] +[[package]] +name = "pymysql" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/ce59b5e5ed4ce8512f879ff1fa5ab699d211ae2495f1adaa5fbba2a1eada/pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0", size = 47678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/94/e4181a1f6286f545507528c78016e00065ea913276888db2262507693ce5/PyMySQL-1.1.1-py3-none-any.whl", hash = 
"sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", size = 44972 }, +] + [[package]] name = "pytest" version = "8.3.5" @@ -1577,6 +1685,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, ] +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, +] + [[package]] name = "pytz" version = "2025.2" @@ -1586,6 +1703,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432 }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = 
"sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103 }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557 }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031 }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308 }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930 }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543 }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 }, + { url = 
"https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700 }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700 }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318 }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714 }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800 }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540 }, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -1630,6 +1769,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, ] +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + 
{ name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 }, +] + [[package]] name = "regex" version = "2024.11.6" @@ -1740,6 +1893,132 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 }, ] +[[package]] +name = "rpds-py" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/31/1459645f036c3dfeacef89e8e5825e430c77dde8489f3b99eaafcd4a60f5/rpds_py-0.26.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4c70c70f9169692b36307a95f3d8c0a9fcd79f7b4a383aad5eaa0e9718b79b37", size = 372466 }, + { url = "https://files.pythonhosted.org/packages/dd/ff/3d0727f35836cc8773d3eeb9a46c40cc405854e36a8d2e951f3a8391c976/rpds_py-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:777c62479d12395bfb932944e61e915741e364c843afc3196b694db3d669fcd0", size = 357825 }, + { url = 
"https://files.pythonhosted.org/packages/bf/ce/badc5e06120a54099ae287fa96d82cbb650a5f85cf247ffe19c7b157fd1f/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec671691e72dff75817386aa02d81e708b5a7ec0dec6669ec05213ff6b77e1bd", size = 381530 }, + { url = "https://files.pythonhosted.org/packages/1e/a5/fa5d96a66c95d06c62d7a30707b6a4cfec696ab8ae280ee7be14e961e118/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a1cb5d6ce81379401bbb7f6dbe3d56de537fb8235979843f0d53bc2e9815a79", size = 396933 }, + { url = "https://files.pythonhosted.org/packages/00/a7/7049d66750f18605c591a9db47d4a059e112a0c9ff8de8daf8fa0f446bba/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f789e32fa1fb6a7bf890e0124e7b42d1e60d28ebff57fe806719abb75f0e9a3", size = 513973 }, + { url = "https://files.pythonhosted.org/packages/0e/f1/528d02c7d6b29d29fac8fd784b354d3571cc2153f33f842599ef0cf20dd2/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c55b0a669976cf258afd718de3d9ad1b7d1fe0a91cd1ab36f38b03d4d4aeaaf", size = 402293 }, + { url = "https://files.pythonhosted.org/packages/15/93/fde36cd6e4685df2cd08508f6c45a841e82f5bb98c8d5ecf05649522acb5/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70d9ec912802ecfd6cd390dadb34a9578b04f9bcb8e863d0a7598ba5e9e7ccc", size = 383787 }, + { url = "https://files.pythonhosted.org/packages/69/f2/5007553aaba1dcae5d663143683c3dfd03d9395289f495f0aebc93e90f24/rpds_py-0.26.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3021933c2cb7def39d927b9862292e0f4c75a13d7de70eb0ab06efed4c508c19", size = 416312 }, + { url = "https://files.pythonhosted.org/packages/8f/a7/ce52c75c1e624a79e48a69e611f1c08844564e44c85db2b6f711d76d10ce/rpds_py-0.26.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a7898b6ca3b7d6659e55cdac825a2e58c638cbf335cde41f4619e290dd0ad11", 
size = 558403 }, + { url = "https://files.pythonhosted.org/packages/79/d5/e119db99341cc75b538bf4cb80504129fa22ce216672fb2c28e4a101f4d9/rpds_py-0.26.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:12bff2ad9447188377f1b2794772f91fe68bb4bbfa5a39d7941fbebdbf8c500f", size = 588323 }, + { url = "https://files.pythonhosted.org/packages/93/94/d28272a0b02f5fe24c78c20e13bbcb95f03dc1451b68e7830ca040c60bd6/rpds_py-0.26.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:191aa858f7d4902e975d4cf2f2d9243816c91e9605070aeb09c0a800d187e323", size = 554541 }, + { url = "https://files.pythonhosted.org/packages/93/e0/8c41166602f1b791da892d976057eba30685486d2e2c061ce234679c922b/rpds_py-0.26.0-cp310-cp310-win32.whl", hash = "sha256:b37a04d9f52cb76b6b78f35109b513f6519efb481d8ca4c321f6a3b9580b3f45", size = 220442 }, + { url = "https://files.pythonhosted.org/packages/87/f0/509736bb752a7ab50fb0270c2a4134d671a7b3038030837e5536c3de0e0b/rpds_py-0.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:38721d4c9edd3eb6670437d8d5e2070063f305bfa2d5aa4278c51cedcd508a84", size = 231314 }, + { url = "https://files.pythonhosted.org/packages/09/4c/4ee8f7e512030ff79fda1df3243c88d70fc874634e2dbe5df13ba4210078/rpds_py-0.26.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9e8cb77286025bdb21be2941d64ac6ca016130bfdcd228739e8ab137eb4406ed", size = 372610 }, + { url = "https://files.pythonhosted.org/packages/fa/9d/3dc16be00f14fc1f03c71b1d67c8df98263ab2710a2fbd65a6193214a527/rpds_py-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e09330b21d98adc8ccb2dbb9fc6cb434e8908d4c119aeaa772cb1caab5440a0", size = 358032 }, + { url = "https://files.pythonhosted.org/packages/e7/5a/7f1bf8f045da2866324a08ae80af63e64e7bfaf83bd31f865a7b91a58601/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9c1b92b774b2e68d11193dc39620d62fd8ab33f0a3c77ecdabe19c179cdbc1", size = 381525 }, + { url = 
"https://files.pythonhosted.org/packages/45/8a/04479398c755a066ace10e3d158866beb600867cacae194c50ffa783abd0/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:824e6d3503ab990d7090768e4dfd9e840837bae057f212ff9f4f05ec6d1975e7", size = 397089 }, + { url = "https://files.pythonhosted.org/packages/72/88/9203f47268db488a1b6d469d69c12201ede776bb728b9d9f29dbfd7df406/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ad7fd2258228bf288f2331f0a6148ad0186b2e3643055ed0db30990e59817a6", size = 514255 }, + { url = "https://files.pythonhosted.org/packages/f5/b4/01ce5d1e853ddf81fbbd4311ab1eff0b3cf162d559288d10fd127e2588b5/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dc23bbb3e06ec1ea72d515fb572c1fea59695aefbffb106501138762e1e915e", size = 402283 }, + { url = "https://files.pythonhosted.org/packages/34/a2/004c99936997bfc644d590a9defd9e9c93f8286568f9c16cdaf3e14429a7/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80bf832ac7b1920ee29a426cdca335f96a2b5caa839811803e999b41ba9030d", size = 383881 }, + { url = "https://files.pythonhosted.org/packages/05/1b/ef5fba4a8f81ce04c427bfd96223f92f05e6cd72291ce9d7523db3b03a6c/rpds_py-0.26.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0919f38f5542c0a87e7b4afcafab6fd2c15386632d249e9a087498571250abe3", size = 415822 }, + { url = "https://files.pythonhosted.org/packages/16/80/5c54195aec456b292f7bd8aa61741c8232964063fd8a75fdde9c1e982328/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d422b945683e409000c888e384546dbab9009bb92f7c0b456e217988cf316107", size = 558347 }, + { url = "https://files.pythonhosted.org/packages/f2/1c/1845c1b1fd6d827187c43afe1841d91678d7241cbdb5420a4c6de180a538/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a7711fa562ba2da1aa757e11024ad6d93bad6ad7ede5afb9af144623e5f76a", size = 587956 }, + { url = 
"https://files.pythonhosted.org/packages/2e/ff/9e979329dd131aa73a438c077252ddabd7df6d1a7ad7b9aacf6261f10faa/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238e8c8610cb7c29460e37184f6799547f7e09e6a9bdbdab4e8edb90986a2318", size = 554363 }, + { url = "https://files.pythonhosted.org/packages/00/8b/d78cfe034b71ffbe72873a136e71acc7a831a03e37771cfe59f33f6de8a2/rpds_py-0.26.0-cp311-cp311-win32.whl", hash = "sha256:893b022bfbdf26d7bedb083efeea624e8550ca6eb98bf7fea30211ce95b9201a", size = 220123 }, + { url = "https://files.pythonhosted.org/packages/94/c1/3c8c94c7dd3905dbfde768381ce98778500a80db9924731d87ddcdb117e9/rpds_py-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:87a5531de9f71aceb8af041d72fc4cab4943648d91875ed56d2e629bef6d4c03", size = 231732 }, + { url = "https://files.pythonhosted.org/packages/67/93/e936fbed1b734eabf36ccb5d93c6a2e9246fbb13c1da011624b7286fae3e/rpds_py-0.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:de2713f48c1ad57f89ac25b3cb7daed2156d8e822cf0eca9b96a6f990718cc41", size = 221917 }, + { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933 }, + { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447 }, + { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711 }, + { url = 
"https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865 }, + { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763 }, + { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651 }, + { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079 }, + { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379 }, + { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033 }, + { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639 }, + { url = 
"https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105 }, + { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272 }, + { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995 }, + { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198 }, + { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917 }, + { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073 }, + { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214 }, + { url = 
"https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113 }, + { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189 }, + { url = "https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998 }, + { url = "https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903 }, + { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785 }, + { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329 }, + { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875 }, + { url = 
"https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636 }, + { url = "https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663 }, + { url = "https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428 }, + { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571 }, + { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475 }, + { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692 }, + { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415 }, + { url = 
"https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783 }, + { url = "https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844 }, + { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105 }, + { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440 }, + { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759 }, + { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032 }, + { url = "https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416 }, + { 
url = "https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049 }, + { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428 }, + { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524 }, + { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292 }, + { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334 }, + { url = "https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875 }, + { url = "https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993 }, + { url = 
"https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683 }, + { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825 }, + { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292 }, + { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435 }, + { url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410 }, + { url = "https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724 }, + { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285 }, + { url = 
"https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459 }, + { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083 }, + { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291 }, + { url = "https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445 }, + { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206 }, + { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330 }, + { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254 }, + { url = 
"https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094 }, + { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889 }, + { url = "https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301 }, + { url = "https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891 }, + { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044 }, + { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774 }, + { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886 }, + { url = 
"https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027 }, + { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821 }, + { url = "https://files.pythonhosted.org/packages/ef/9a/1f033b0b31253d03d785b0cd905bc127e555ab496ea6b4c7c2e1f951f2fd/rpds_py-0.26.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3c0909c5234543ada2515c05dc08595b08d621ba919629e94427e8e03539c958", size = 373226 }, + { url = "https://files.pythonhosted.org/packages/58/29/5f88023fd6aaaa8ca3c4a6357ebb23f6f07da6079093ccf27c99efce87db/rpds_py-0.26.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c1fb0cda2abcc0ac62f64e2ea4b4e64c57dfd6b885e693095460c61bde7bb18e", size = 359230 }, + { url = "https://files.pythonhosted.org/packages/6c/6c/13eaebd28b439da6964dde22712b52e53fe2824af0223b8e403249d10405/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d142d2d6cf9b31c12aa4878d82ed3b2324226270b89b676ac62ccd7df52d08", size = 382363 }, + { url = "https://files.pythonhosted.org/packages/55/fc/3bb9c486b06da19448646f96147796de23c5811ef77cbfc26f17307b6a9d/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a547e21c5610b7e9093d870be50682a6a6cf180d6da0f42c47c306073bfdbbf6", size = 397146 }, + { url = "https://files.pythonhosted.org/packages/15/18/9d1b79eb4d18e64ba8bba9e7dec6f9d6920b639f22f07ee9368ca35d4673/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35e9a70a0f335371275cdcd08bc5b8051ac494dd58bff3bbfb421038220dc871", size = 514804 }, + { url = 
"https://files.pythonhosted.org/packages/4f/5a/175ad7191bdbcd28785204621b225ad70e85cdfd1e09cc414cb554633b21/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dfa6115c6def37905344d56fb54c03afc49104e2ca473d5dedec0f6606913b4", size = 402820 }, + { url = "https://files.pythonhosted.org/packages/11/45/6a67ecf6d61c4d4aff4bc056e864eec4b2447787e11d1c2c9a0242c6e92a/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:313cfcd6af1a55a286a3c9a25f64af6d0e46cf60bc5798f1db152d97a216ff6f", size = 384567 }, + { url = "https://files.pythonhosted.org/packages/a1/ba/16589da828732b46454c61858950a78fe4c931ea4bf95f17432ffe64b241/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f7bf2496fa563c046d05e4d232d7b7fd61346e2402052064b773e5c378bf6f73", size = 416520 }, + { url = "https://files.pythonhosted.org/packages/81/4b/00092999fc7c0c266045e984d56b7314734cc400a6c6dc4d61a35f135a9d/rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:aa81873e2c8c5aa616ab8e017a481a96742fdf9313c40f14338ca7dbf50cb55f", size = 559362 }, + { url = "https://files.pythonhosted.org/packages/96/0c/43737053cde1f93ac4945157f7be1428724ab943e2132a0d235a7e161d4e/rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:68ffcf982715f5b5b7686bdd349ff75d422e8f22551000c24b30eaa1b7f7ae84", size = 588113 }, + { url = "https://files.pythonhosted.org/packages/46/46/8e38f6161466e60a997ed7e9951ae5de131dedc3cf778ad35994b4af823d/rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6188de70e190847bb6db3dc3981cbadff87d27d6fe9b4f0e18726d55795cee9b", size = 555429 }, + { url = "https://files.pythonhosted.org/packages/2c/ac/65da605e9f1dd643ebe615d5bbd11b6efa1d69644fc4bf623ea5ae385a82/rpds_py-0.26.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1c962145c7473723df9722ba4c058de12eb5ebedcb4e27e7d902920aa3831ee8", size = 231950 }, + { url = 
"https://files.pythonhosted.org/packages/51/f2/b5c85b758a00c513bb0389f8fc8e61eb5423050c91c958cdd21843faa3e6/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f61a9326f80ca59214d1cceb0a09bb2ece5b2563d4e0cd37bfd5515c28510674", size = 373505 }, + { url = "https://files.pythonhosted.org/packages/23/e0/25db45e391251118e915e541995bb5f5ac5691a3b98fb233020ba53afc9b/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:183f857a53bcf4b1b42ef0f57ca553ab56bdd170e49d8091e96c51c3d69ca696", size = 359468 }, + { url = "https://files.pythonhosted.org/packages/0b/73/dd5ee6075bb6491be3a646b301dfd814f9486d924137a5098e61f0487e16/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:941c1cfdf4799d623cf3aa1d326a6b4fdb7a5799ee2687f3516738216d2262fb", size = 382680 }, + { url = "https://files.pythonhosted.org/packages/2f/10/84b522ff58763a5c443f5bcedc1820240e454ce4e620e88520f04589e2ea/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72a8d9564a717ee291f554eeb4bfeafe2309d5ec0aa6c475170bdab0f9ee8e88", size = 397035 }, + { url = "https://files.pythonhosted.org/packages/06/ea/8667604229a10a520fcbf78b30ccc278977dcc0627beb7ea2c96b3becef0/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:511d15193cbe013619dd05414c35a7dedf2088fcee93c6bbb7c77859765bd4e8", size = 514922 }, + { url = "https://files.pythonhosted.org/packages/24/e6/9ed5b625c0661c4882fc8cdf302bf8e96c73c40de99c31e0b95ed37d508c/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aea1f9741b603a8d8fedb0ed5502c2bc0accbc51f43e2ad1337fe7259c2b77a5", size = 402822 }, + { url = "https://files.pythonhosted.org/packages/8a/58/212c7b6fd51946047fb45d3733da27e2fa8f7384a13457c874186af691b1/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4019a9d473c708cf2f16415688ef0b4639e07abaa569d72f74745bbeffafa2c7", size = 384336 }, + { url = "https://files.pythonhosted.org/packages/aa/f5/a40ba78748ae8ebf4934d4b88e77b98497378bc2c24ba55ebe87a4e87057/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:093d63b4b0f52d98ebae33b8c50900d3d67e0666094b1be7a12fffd7f65de74b", size = 416871 }, + { url = "https://files.pythonhosted.org/packages/d5/a6/33b1fc0c9f7dcfcfc4a4353daa6308b3ece22496ceece348b3e7a7559a09/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2abe21d8ba64cded53a2a677e149ceb76dcf44284202d737178afe7ba540c1eb", size = 559439 }, + { url = "https://files.pythonhosted.org/packages/71/2d/ceb3f9c12f8cfa56d34995097f6cd99da1325642c60d1b6680dd9df03ed8/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:4feb7511c29f8442cbbc28149a92093d32e815a28aa2c50d333826ad2a20fdf0", size = 588380 }, + { url = "https://files.pythonhosted.org/packages/c8/ed/9de62c2150ca8e2e5858acf3f4f4d0d180a38feef9fdab4078bea63d8dba/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c", size = 555334 }, +] + [[package]] name = "shellingham" version = "1.5.4" @@ -1938,7 +2217,7 @@ wheels = [ [[package]] name = "typer" -version = "0.15.2" +version = "0.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -1946,9 +2225,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 } +sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = 
"sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 }, + { url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317 }, ] [[package]] From fd36736bbcd66387a14ea9619576696b7b2a8eb6 Mon Sep 17 00:00:00 2001 From: antejavor Date: Wed, 16 Jul 2025 10:12:52 +0200 Subject: [PATCH 2/2] Update generation. --- integrations/agents/.env.example | 15 + integrations/agents/ENHANCEMENTS.md | 129 ++++++ integrations/agents/README.md | 203 +++++++-- integrations/agents/cypher_generator.py | 499 ++++++++++++++++++++--- integrations/agents/database_analyzer.py | 142 ++++++- integrations/agents/enhanced_example.py | 202 +++++++++ integrations/agents/main.py | 313 +++++++++++--- 7 files changed, 1329 insertions(+), 174 deletions(-) create mode 100644 integrations/agents/.env.example create mode 100644 integrations/agents/ENHANCEMENTS.md create mode 100644 integrations/agents/enhanced_example.py diff --git a/integrations/agents/.env.example b/integrations/agents/.env.example new file mode 100644 index 0000000..53998cb --- /dev/null +++ b/integrations/agents/.env.example @@ -0,0 +1,15 @@ +# OpenAI API Configuration +OPENAI_API_KEY=your_actual_openai_api_key + +# MySQL Database Configuration +MYSQL_HOST=localhost +MYSQL_USER=root +MYSQL_PASSWORD=your_mysql_password +MYSQL_DATABASE=sakila +MYSQL_PORT=3306 + +# Memgraph Database Configuration +MEMGRAPH_URL=bolt://localhost:7687 +MEMGRAPH_USERNAME= +MEMGRAPH_PASSWORD= +MEMGRAPH_DATABASE=memgraph diff --git a/integrations/agents/ENHANCEMENTS.md 
b/integrations/agents/ENHANCEMENTS.md new file mode 100644 index 0000000..250aa03 --- /dev/null +++ b/integrations/agents/ENHANCEMENTS.md @@ -0,0 +1,129 @@ +# Migration Agent Enhancements Summary + +## Overview + +Successfully implemented the three major enhancements requested for the MySQL to Memgraph migration agent: + +### 1. Foreign Keys to Relationships ✅ + +- **Enhanced Database Analyzer**: Added logic to detect and categorize foreign key relationships +- **Updated Cypher Generator**: Modified node creation to exclude foreign key columns from properties +- **Relationship Creation**: Foreign keys are now converted to graph relationships instead of node properties + +### 2. Join Tables to Relationships ✅ + +- **Join Table Detection**: Implemented `is_join_table()` method to identify many-to-many tables +- **Schema Classification**: Tables are now categorized as "entity" or "join" types +- **Many-to-Many Relationships**: Join tables are converted to relationships with their non-FK columns as properties +- **Data Handling**: Added specialized data preparation for join table relationship creation + +### 3. 
Configurable Relationship Labels ✅ + +- **Multiple Naming Strategies**: Implemented three different approaches: + - **Smart Strategy**: Intelligent naming based on common patterns (default) + - **Table-Based Strategy**: Uses table names directly + - **LLM Strategy**: Uses OpenAI to generate contextual names +- **Fallback Logic**: LLM strategy falls back to smart strategy if AI generation fails + +## Key Files Modified + +### `database_analyzer.py` + +- Added `is_join_table()` method for detecting junction tables +- Added `get_table_type()` method for table classification +- Enhanced `get_database_structure()` to separate entity and join tables +- Updated relationship detection to handle both one-to-many and many-to-many + +### `cypher_generator.py` + +- Added relationship naming strategy configuration +- Enhanced node creation to exclude foreign key columns +- Added support for many-to-many relationship generation +- Implemented three relationship naming strategies with LLM integration +- Added specialized data preparation methods + +### `main.py` + +- Updated constructor to accept relationship naming strategy +- Enhanced migration execution to handle entity vs join tables +- Improved relationship creation workflow +- Added proper data flow for both types of relationships + +### Documentation + +- Updated `README.md` with comprehensive documentation +- Created `enhanced_example.py` demonstrating new features +- Added examples and usage patterns + +## Technical Improvements + +### Schema Analysis + +- Automatic detection of join tables based on foreign key ratio +- Classification of tables into entity and join categories +- Enhanced foreign key analysis and relationship mapping + +### Query Generation + +- Foreign key columns excluded from node properties +- Specialized handling for one-to-many vs many-to-many relationships +- Configurable relationship naming with multiple strategies +- Proper handling of relationship properties from join tables + +### Data 
Migration + +- Separate workflows for entity tables and join tables +- FK column exclusion during node creation +- Join table data converted to relationship properties +- Proper ordering of migration steps + +## Usage Examples + +### Basic Usage with Smart Naming + +```python +agent = MySQLToMemgraphAgent() # Uses "smart" strategy by default +``` + +### Table-Based Naming + +```python +agent = MySQLToMemgraphAgent(relationship_naming_strategy="table_based") +``` + +### LLM-Based Naming + +```python +agent = MySQLToMemgraphAgent(relationship_naming_strategy="llm") +``` + +## Benefits + +1. **Better Graph Modeling**: Foreign keys become proper relationships instead of properties +2. **Cleaner Node Structure**: Node properties only contain actual entity attributes +3. **Rich Relationships**: Join table columns become relationship properties +4. **Flexible Naming**: Multiple strategies for generating meaningful relationship names +5. **Automatic Detection**: No manual configuration needed for join table identification +6. **Backward Compatibility**: Existing functionality remains intact + +## Testing + +The enhanced agent has been tested with: + +- ✅ Sakila database (film_actor, film_category join tables) +- ✅ Foreign key relationship detection +- ✅ Join table classification +- ✅ All three naming strategies +- ✅ Data type mappings and conversions + +## Next Steps + +The migration agent now provides a robust foundation for MySQL to Memgraph migrations with proper graph modeling principles. Future enhancements could include: + +- Support for more complex relationship patterns +- Custom relationship property mappings +- Advanced schema optimization suggestions +- Performance optimizations for large datasets +- Support for additional database sources + +All requested features have been successfully implemented and are ready for production use! 
diff --git a/integrations/agents/README.md b/integrations/agents/README.md index c106785..c96285e 100644 --- a/integrations/agents/README.md +++ b/integrations/agents/README.md @@ -2,13 +2,32 @@ This agent analyzes MySQL databases, generates appropriate Cypher queries, and migrates data to Memgraph using LangGraph workflow. It's specifically designed to work with the Sakila sample database but can be adapted for other MySQL databases. -## Features +## Enhanced Features (New!) + +### 🔗 Advanced Relationship Handling + +- **Foreign Keys to Relationships**: Automatically converts foreign key columns to graph relationships and removes them from node properties +- **Join Table Detection**: Identifies many-to-many join tables and converts them to relationships with properties +- **Smart Relationship Naming**: Multiple strategies for generating meaningful relationship names + +### 🎯 Configurable Relationship Naming + +- **Smart Strategy** (Default): Uses intelligent patterns based on common database conventions +- **Table-Based Strategy**: Uses table names directly for relationship labels +- **LLM Strategy**: Uses OpenAI to generate contextually appropriate relationship names + +### 📊 Enhanced Schema Analysis + +- **Entity vs Join Table Classification**: Automatically categorizes tables as entities or join tables +- **Relationship Property Mapping**: Converts non-FK columns in join tables to relationship properties +- **Comprehensive Foreign Key Analysis**: Deep analysis of all foreign key relationships + +## Core Features - **Automatic Schema Analysis**: Connects to MySQL and analyzes table structures, relationships, and constraints - **Intelligent Migration Planning**: Uses OpenAI GPT to generate optimal migration strategies - **Cypher Query Generation**: Automatically generates Cypher queries for creating nodes, relationships, and constraints - **Data Type Mapping**: Maps MySQL data types to appropriate Memgraph/Cypher types -- **Relationship Detection**: Identifies 
foreign key relationships and converts them to graph relationships - **Progress Tracking**: Provides detailed progress updates and error handling - **Verification**: Validates migration results by comparing counts and structures @@ -73,12 +92,12 @@ If you don't have the Sakila database set up: ## Usage -### Basic Usage +### Enhanced Usage with Relationship Naming Strategies -Run the migration agent: +Run the enhanced example with different relationship naming strategies: ```bash -uv run python main.py +uv run python enhanced_example.py ``` ### Programmatic Usage @@ -102,35 +121,66 @@ memgraph_config = { "database": "memgraph" } -# Create and run the agent -agent = MySQLToMemgraphAgent() -result = agent.migrate(mysql_config, memgraph_config) - -print(f"Success: {result['success']}") -print(f"Migrated {len(result['completed_tables'])} tables") +# Create agent with different relationship naming strategies +strategies = ["smart", "table_based", "llm"] + +for strategy in strategies: + print(f"Using {strategy} strategy...") + + # Create agent with specific strategy + agent = MySQLToMemgraphAgent(relationship_naming_strategy=strategy) + + # Define initial state + initial_state = { + "mysql_config": mysql_config, + "memgraph_config": memgraph_config, + "database_structure": {}, + "migration_queries": [], + "migration_plan": "", + "current_step": "Initializing", + "errors": [], + "completed_tables": [], + "total_tables": 0 + } + + # Run migration + result = agent.workflow.invoke(initial_state) + + print(f"Success: {len(result['errors']) == 0}") + print(f"Migrated {len(result['completed_tables'])} tables") + if result.get('database_structure'): + structure = result['database_structure'] + print(f"Entity tables: {len(structure.get('entity_tables', {}))}") + print(f"Join tables: {len(structure.get('join_tables', {}))}") + print(f"Relationships: {len(structure.get('relationships', []))}") ``` ## How It Works -The agent follows a multi-step workflow: +The agent follows an 
enhanced multi-step workflow: -1. **Schema Analysis**: +1. **Advanced Schema Analysis**: - Connects to MySQL database - Extracts table schemas, foreign keys, and relationships + - **NEW**: Classifies tables as entity tables vs join tables + - **NEW**: Detects many-to-many relationships via join tables - Counts rows in each table 2. **Migration Planning**: - Uses OpenAI GPT to analyze the database structure - Generates an optimal migration plan considering dependencies + - **NEW**: Plans for both entity migration and relationship creation - Identifies potential issues and optimizations -3. **Query Generation**: +3. **Enhanced Query Generation**: - Maps MySQL data types to Cypher types - - Generates node creation queries for each table - - Creates relationship queries based on foreign keys + - **NEW**: Generates node creation queries excluding foreign key columns + - **NEW**: Creates one-to-many relationship queries from foreign keys + - **NEW**: Creates many-to-many relationship queries from join tables + - **NEW**: Applies configurable relationship naming strategies - Generates constraint and index creation queries 4. **Query Validation**: @@ -138,32 +188,121 @@ The agent follows a multi-step workflow: - Tests connection to Memgraph - Validates query syntax -5. **Migration Execution**: +5. **Enhanced Migration Execution**: - Creates constraints and indexes first - - Migrates data table by table - - Creates relationships between nodes - - Handles errors gracefully + - **NEW**: Migrates entity tables only (excludes join tables from node creation) + - **NEW**: Removes foreign key columns from node properties + - **NEW**: Creates one-to-many relationships from foreign keys + - **NEW**: Creates many-to-many relationships from join table data 6. 
**Verification**: - - Compares node and relationship counts - - Provides detailed migration summary + - Validates migration by comparing node and relationship counts + - Checks data integrity and completeness + +## Relationship Naming Strategies + +The agent supports three different strategies for naming relationships: + +### 1. Smart Strategy (Default) + +Uses intelligent patterns based on common database conventions: + +```python +agent = MySQLToMemgraphAgent(relationship_naming_strategy="smart") +``` + +Examples: + +- `customer` → `order`: `PLACED` +- `film` → `actor`: `FEATURES` +- `film_actor` join table: `ACTED_IN` +- `user` → `address`: `LOCATED_AT` + +### 2. Table-Based Strategy + +Uses table names directly for relationship labels: + +```python +agent = MySQLToMemgraphAgent(relationship_naming_strategy="table_based") +``` + +Examples: -## Graph Model for Sakila +- `customer` → `order`: `HAS_ORDER` +- `film_actor` join table: `FILM_ACTOR` +- `user` → `role`: `HAS_ROLE` -The Sakila database is converted to a graph model with the following approach: +### 3. LLM Strategy -- **Tables → Node Labels**: Each table becomes a node type (e.g., `film` → `Film`) -- **Foreign Keys → Relationships**: FK relationships become directed edges -- **Primary Keys → Node IDs**: Primary keys become unique node identifiers -- **Data Types**: MySQL types are mapped to Cypher-compatible types +Uses OpenAI to generate contextually appropriate names: + +```python +agent = MySQLToMemgraphAgent(relationship_naming_strategy="llm") +``` + +The LLM analyzes table names and context to suggest meaningful relationship names. Falls back to smart strategy if LLM fails. 
+ +## Enhanced Database Structure Transformation + +The agent performs sophisticated transformations: + +### Entity Tables → Nodes + +- **Tables → Node Labels**: Each entity table becomes a node type +- **Primary Keys → Node IDs**: Primary keys become unique identifiers +- **Non-FK Columns → Properties**: Regular columns become node properties +- **FK Columns → Removed**: Foreign key columns are excluded from properties + +### Join Tables → Relationships + +- **Junction Tables → Relationships**: Many-to-many tables become relationships +- **Additional Columns → Relationship Properties**: Non-FK columns become relationship properties +- **Table Detection**: Automatically identifies tables with mostly foreign keys + +### Foreign Keys → Relationships + +- **FK Constraints → Directed Edges**: Foreign keys become graph relationships +- **Configurable Names**: Relationship labels generated using selected strategy Example transformations: -- `film` table → `Film` nodes -- `actor` table → `Actor` nodes -- `film_actor` junction table → `ACTED_IN` relationships -- `customer` → `Customer` nodes with `PLACED` relationships to `Rental` nodes +**Before (MySQL)**: + +```sql +-- Entity tables +CREATE TABLE film (film_id, title, description, rating); +CREATE TABLE actor (actor_id, first_name, last_name); + +-- Join table +CREATE TABLE film_actor ( + film_id INT REFERENCES film(film_id), + actor_id INT REFERENCES actor(actor_id), + last_update TIMESTAMP +); +``` + +**After (Memgraph)**: + +```cypher +// Entity nodes (FK columns removed) +CREATE (f:Film {film_id: 1, title: "Movie", description: "...", rating: "PG"}) +CREATE (a:Actor {actor_id: 1, first_name: "John", last_name: "Doe"}) + +// Relationship with properties from join table +CREATE (a)-[:ACTED_IN {last_update: "2023-01-01"}]->(f) +``` + +## Data Type Mappings + +The agent maps MySQL types to Cypher-compatible types: + +- `INT` → `Integer` +- `VARCHAR`/`CHAR` → `String` +- `TEXT` → `String` +- `DATE`/`DATETIME` → 
`LocalDate`/`LocalDateTime` +- `FLOAT`/`DOUBLE` → `Float` +- `DECIMAL` → `Decimal` ## Customization diff --git a/integrations/agents/cypher_generator.py b/integrations/agents/cypher_generator.py index 66ed5b1..081851b 100644 --- a/integrations/agents/cypher_generator.py +++ b/integrations/agents/cypher_generator.py @@ -11,31 +11,59 @@ class CypherGenerator: """Generates Cypher queries for Memgraph based on MySQL schema.""" - def __init__(self): - """Initialize the Cypher generator.""" + def __init__(self, relationship_naming_strategy: str = "table_based"): + """Initialize the Cypher generator. + + Args: + relationship_naming_strategy: Strategy for naming relationships. + - "table_based": Use table names directly (default) + - "llm": Use LLM to generate meaningful names (requires LLM) + """ + self.relationship_naming_strategy = relationship_naming_strategy + self.llm = None # Will be set if using LLM strategy + self.type_mapping = { "int": "INTEGER", "bigint": "INTEGER", "smallint": "INTEGER", "tinyint": "INTEGER", + "mediumint": "INTEGER", "varchar": "STRING", "char": "STRING", "text": "STRING", "longtext": "STRING", "mediumtext": "STRING", + "tinytext": "STRING", "decimal": "FLOAT", + "numeric": "FLOAT", "float": "FLOAT", "double": "FLOAT", + "real": "FLOAT", "datetime": "DATETIME", "timestamp": "DATETIME", "date": "DATE", "time": "TIME", + "year": "INTEGER", "enum": "STRING", "set": "STRING", "blob": "STRING", + "tinyblob": "STRING", + "mediumblob": "STRING", + "longblob": "STRING", + "binary": "STRING", + "varbinary": "STRING", "json": "STRING", + "geometry": "STRING", + "point": "STRING", + "linestring": "STRING", + "polygon": "STRING", + "bit": "INTEGER", } + def set_llm(self, llm): + """Set LLM for relationship naming strategy.""" + self.llm = llm + def mysql_to_cypher_type(self, mysql_type: str) -> str: """Convert MySQL data type to Cypher/Memgraph type.""" # Extract base type (remove size specifications) @@ -43,9 +71,15 @@ def mysql_to_cypher_type(self, 
mysql_type: str) -> str: return self.type_mapping.get(base_type, "STRING") def generate_node_creation_query( - self, table_name: str, schema: List[Dict[str, Any]] + self, + table_name: str, + schema: List[Dict[str, Any]], + foreign_keys: List[Dict[str, str]] = None, ) -> str: """Generate Cypher query to create nodes for a table.""" + if foreign_keys is None: + foreign_keys = [] + # Determine primary key primary_keys = [col["field"] for col in schema if col["key"] == "PRI"] @@ -55,48 +89,115 @@ def generate_node_creation_query( else: id_field = primary_keys[0] - # Create property definitions + # Get foreign key column names to exclude them from properties + fk_column_names = {fk["column"] for fk in foreign_keys} + + # Create property definitions (exclude FK columns and ID field) properties = [] for col in schema: - if col["field"] != id_field: # Skip the ID field in properties + if col["field"] != id_field and col["field"] not in fk_column_names: + safe_field_name = self._escape_reserved_keyword(col["field"]) cypher_type = self.mysql_to_cypher_type(col["type"]) - properties.append(f"{col['field']}: {cypher_type}") + properties.append(f"{safe_field_name}: {cypher_type}") # Generate the query label = self._table_name_to_label(table_name) + safe_id_field = self._escape_reserved_keyword(id_field) + query = f""" // Create {label} nodes UNWIND $data AS row CREATE (n:{label} {{ - {id_field}: row.{id_field}""" + {safe_id_field}: row.{safe_id_field}""" if properties: - query += ",\n " + ",\n ".join( - f"{prop.split(':')[0]}: row.{prop.split(':')[0]}" for prop in properties - ) + property_assignments = [] + for col in schema: + if col["field"] != id_field and col["field"] not in fk_column_names: + safe_field_name = self._escape_reserved_keyword(col["field"]) + property_assignments.append( + f"{safe_field_name}: row.{safe_field_name}" + ) + + query += ",\n " + ",\n ".join(property_assignments) query += "\n })" return query.strip() - def generate_relationship_query( - self, 
from_table: str, from_column: str, to_table: str, to_column: str - ) -> str: + def generate_relationship_query(self, relationship: Dict[str, Any]) -> str: """Generate Cypher query to create relationships.""" + if relationship["type"] == "many_to_many": + return self._generate_many_to_many_query(relationship) + else: + return self._generate_one_to_many_query(relationship) + + def _generate_one_to_many_query(self, relationship: Dict[str, Any]) -> str: + """Generate one-to-many relationship query.""" + from_table = relationship["from_table"] + from_column = relationship["from_column"] + to_table = relationship["to_table"] + to_column = relationship["to_column"] + from_label = self._table_name_to_label(from_table) to_label = self._table_name_to_label(to_table) rel_type = self._generate_relationship_type(from_table, to_table) + # Escape column names if they are reserved keywords + safe_from_column = self._escape_reserved_keyword(from_column) + safe_to_column = self._escape_reserved_keyword(to_column) + query = f""" // Create {rel_type} relationships from {from_label} to {to_label} MATCH (from:{from_label}) MATCH (to:{to_label}) - WHERE from.{from_column} = to.{to_column} + WHERE from.{safe_from_column} = to.{safe_to_column} CREATE (from)-[:{rel_type}]->(to) """ return query.strip() + def _generate_many_to_many_query(self, relationship: Dict[str, Any]) -> str: + """Generate many-to-many relationship query from join table.""" + join_table = relationship["join_table"] + from_table = relationship["from_table"] + to_table = relationship["to_table"] + join_from_column = relationship["join_from_column"] + join_to_column = relationship["join_to_column"] + from_column = relationship["from_column"] + to_column = relationship["to_column"] + additional_properties = relationship.get("additional_properties", []) + + from_label = self._table_name_to_label(from_table) + to_label = self._table_name_to_label(to_table) + rel_type = self._generate_relationship_type(from_table, to_table, 
join_table) + + # Escape column names if they are reserved keywords + safe_from_column = self._escape_reserved_keyword(from_column) + safe_to_column = self._escape_reserved_keyword(to_column) + safe_join_from_column = self._escape_reserved_keyword(join_from_column) + safe_join_to_column = self._escape_reserved_keyword(join_to_column) + + # Build relationship properties + rel_properties = "" + if additional_properties: + prop_assignments = [] + for prop in additional_properties: + safe_prop = self._escape_reserved_keyword(prop) + prop_assignments.append(f"{safe_prop}: row.{safe_prop}") + rel_properties = " {" + ", ".join(prop_assignments) + "}" + + query = f""" + // Create {rel_type} relationships from {from_label} to {to_label} + // via {join_table} join table + UNWIND $data AS row + MATCH (from:{from_label} {{{safe_from_column}: row.{safe_join_from_column}}}) + MATCH (to:{to_label} {{{safe_to_column}: row.{safe_join_to_column}}}) + CREATE (from)-[:{rel_type}{rel_properties}]->(to) + """ + + return query.strip() + def generate_index_queries( self, table_name: str, schema: List[Dict[str, Any]] ) -> List[str]: @@ -106,8 +207,7 @@ def generate_index_queries( for col in schema: if col["key"] in ["PRI", "UNI", "MUL"]: - index_type = "UNIQUE" if col["key"] in ["PRI", "UNI"] else "" - query = f"CREATE {index_type} INDEX ON :{label}({col['field']})" + query = f"CREATE INDEX ON :{label}({col['field']})" queries.append(query.strip()) return queries @@ -144,7 +244,7 @@ def generate_full_migration_script(self, structure: Dict[str, Any]) -> List[str] # 1. Create constraints first queries.append("// Step 1: Create constraints") - for table_name, table_info in structure["tables"].items(): + for table_name, table_info in structure["entity_tables"].items(): constraint_queries = self.generate_constraint_queries( table_name, table_info["schema"] ) @@ -153,18 +253,18 @@ def generate_full_migration_script(self, structure: Dict[str, Any]) -> List[str] # 2. 
Create indexes queries.append("// Step 2: Create indexes") - for table_name, table_info in structure["tables"].items(): + for table_name, table_info in structure["entity_tables"].items(): index_queries = self.generate_index_queries( table_name, table_info["schema"] ) queries.extend(index_queries) queries.append("") - # 3. Create nodes + # 3. Create nodes (only for entity tables, not join tables) queries.append("// Step 3: Create nodes") - for table_name, table_info in structure["tables"].items(): + for table_name, table_info in structure["entity_tables"].items(): node_query = self.generate_node_creation_query( - table_name, table_info["schema"] + table_name, table_info["schema"], table_info["foreign_keys"] ) queries.append(node_query) queries.append("") @@ -172,9 +272,7 @@ def generate_full_migration_script(self, structure: Dict[str, Any]) -> List[str] # 4. Create relationships queries.append("// Step 4: Create relationships") for rel in structure["relationships"]: - rel_query = self.generate_relationship_query( - rel["from_table"], rel["from_column"], rel["to_table"], rel["to_column"] - ) + rel_query = self.generate_relationship_query(rel) queries.append(rel_query) queries.append("") @@ -185,37 +283,151 @@ def _table_name_to_label(self, table_name: str) -> str: # Convert to PascalCase return "".join(word.capitalize() for word in table_name.split("_")) - def _generate_relationship_type(self, from_table: str, to_table: str) -> str: + def _generate_relationship_type( + self, from_table: str, to_table: str, join_table: str = None + ) -> str: """Generate relationship type name.""" - # Create a meaningful relationship name - from_label = self._table_name_to_label(from_table) - to_label = self._table_name_to_label(to_table) - - # Common relationship patterns - if "customer" in from_table.lower() and "order" in to_table.lower(): - return "PLACED" - elif "order" in from_table.lower() and "item" in to_table.lower(): - return "CONTAINS" - elif "film" in from_table.lower() 
and "actor" in to_table.lower(): - return "FEATURES" - elif "actor" in from_table.lower() and "film" in to_table.lower(): - return "ACTED_IN" - elif "store" in from_table.lower(): - return "BELONGS_TO" - elif "address" in to_table.lower(): - return "LOCATED_AT" - elif "category" in to_table.lower(): - return "BELONGS_TO_CATEGORY" - elif "language" in to_table.lower(): - return "IN_LANGUAGE" + if self.relationship_naming_strategy == "llm" and self.llm: + return self._generate_relationship_type_with_llm( + from_table, to_table, join_table + ) else: - return f"RELATED_TO_{to_label.upper()}" + # Table-based naming strategy (default) + if join_table: + return self._table_name_to_label(join_table).upper() + else: + return f"HAS_{self._table_name_to_label(to_table).upper()}" + + def _generate_relationship_type_with_llm( + self, from_table: str, to_table: str, join_table: str = None + ) -> str: + """Generate relationship type using LLM.""" + try: + from langchain_core.messages import HumanMessage, SystemMessage + + if join_table: + prompt = f""" + Given a many-to-many relationship between tables '{from_table}' + and '{to_table}' via join table '{join_table}', suggest a + meaningful relationship name in UPPER_CASE format. + + Examples: + - film_actor -> ACTED_IN + - customer_rental -> RENTED + - user_role -> HAS_ROLE + + Return only the relationship name, nothing else. + """ + else: + prompt = f""" + Given a one-to-many relationship from table '{from_table}' + to table '{to_table}', suggest a meaningful relationship name + in UPPER_CASE format. + + Examples: + - customer -> order: PLACED + - order -> order_item: CONTAINS + - film -> language: IN_LANGUAGE + + Return only the relationship name, nothing else. 
+ """ + + messages = [ + SystemMessage(content="You are a database modeling expert."), + HumanMessage(content=prompt), + ] + + response = self.llm.invoke(messages) + relationship_name = response.content.strip().upper() + + # Validate the response + if relationship_name and relationship_name.replace("_", "").isalpha(): + return relationship_name + else: + # Fallback to table-based naming + if join_table: + return self._table_name_to_label(join_table).upper() + else: + return f"HAS_{self._table_name_to_label(to_table).upper()}" + + except Exception as e: + logger.warning(f"LLM relationship naming failed: {e}") + # Fallback to table-based naming + if join_table: + return self._table_name_to_label(join_table).upper() + else: + return f"HAS_{self._table_name_to_label(to_table).upper()}" def prepare_data_for_cypher( - self, data: List[Dict[str, Any]], schema: List[Dict[str, Any]] + self, + data: List[Dict[str, Any]], + schema: List[Dict[str, Any]], + foreign_keys: List[Dict[str, str]] = None, ) -> List[Dict[str, Any]]: """Prepare data for Cypher ingestion by handling null values and type conversions.""" + if foreign_keys is None: + foreign_keys = [] + + prepared_data = [] + fk_column_names = {fk["column"] for fk in foreign_keys} + + for row_idx, row in enumerate(data): + prepared_row = {} + try: + for col in schema: + field_name = col["field"] + value = row.get(field_name) + + # Skip foreign key columns and non-existent columns + if field_name in fk_column_names or field_name not in row: + continue + + # Handle reserved keywords by escaping them + safe_field_name = self._escape_reserved_keyword(field_name) + + # Handle null values + if value is None: + if col["null"] == "NO": + # Set default value for non-nullable fields + cypher_type = self.mysql_to_cypher_type(col["type"]) + if cypher_type == "INTEGER": + value = 0 + elif cypher_type == "FLOAT": + value = 0.0 + elif cypher_type == "STRING": + value = "" + else: + value = None + else: + value = None + + # Convert types 
if needed + if value is not None: + try: + value = self._convert_value_for_cypher(value, col["type"]) + except Exception as e: + logger.warning( + f"Failed to convert value {value} for field {field_name}: {e}" + ) + # Try to convert to string as fallback + value = str(value) if value is not None else None + + prepared_row[safe_field_name] = value + + prepared_data.append(prepared_row) + + except Exception as e: + logger.error(f"Error preparing row {row_idx}: {e}") + # Skip this row rather than failing the entire migration + continue + + return prepared_data + + def prepare_join_table_data_for_cypher( + self, data: List[Dict[str, Any]], schema: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """Prepare join table data for relationship creation.""" prepared_data = [] for row in data: @@ -224,31 +436,182 @@ def prepare_data_for_cypher( field_name = col["field"] value = row.get(field_name) - # Handle null values - if value is None: - if col["null"] == "NO": - # Set default value for non-nullable fields - cypher_type = self.mysql_to_cypher_type(col["type"]) - if cypher_type == "INTEGER": - value = 0 - elif cypher_type == "FLOAT": - value = 0.0 - elif cypher_type == "STRING": - value = "" - else: - value = None - else: - value = None + # Handle reserved keywords by escaping them + safe_field_name = self._escape_reserved_keyword(field_name) # Convert types if needed if value is not None: - cypher_type = self.mysql_to_cypher_type(col["type"]) - if cypher_type in ["DATETIME", "DATE", "TIME"]: - # Convert datetime objects to strings - value = str(value) if value else None + value = self._convert_value_for_cypher(value, col["type"]) - prepared_row[field_name] = value + prepared_row[safe_field_name] = value prepared_data.append(prepared_row) return prepared_data + + def _convert_value_for_cypher(self, value: Any, mysql_type: str) -> Any: + """Convert a MySQL value to Cypher-compatible type.""" + import decimal + from datetime import datetime, date, time + + # Handle 
decimal types + if isinstance(value, decimal.Decimal): + return float(value) + + # Handle datetime types + if isinstance(value, (datetime, date, time)): + return str(value) + + # Handle boolean types (MySQL uses tinyint(1)) + if mysql_type.lower().startswith("tinyint(1)"): + return bool(value) + + # Handle MySQL SET types (convert to comma-separated string) + if isinstance(value, set): + return ",".join(sorted(str(item) for item in value)) + + # Handle MySQL ENUM types if they come as sets + if mysql_type.lower().startswith("set") and isinstance(value, str): + # Already a string, keep as is + return value + + # Handle binary data + if isinstance(value, bytes): + try: + return value.decode("utf-8") + except UnicodeDecodeError: + # For binary data that can't be decoded, convert to hex string + return value.hex() + + # Handle large integers that might exceed JavaScript safe integer range + if isinstance(value, int) and abs(value) > 2**53 - 1: + return str(value) + + # Handle None/NULL values + if value is None: + return None + + # Handle any other types by converting to string + if not isinstance(value, (str, int, float, bool)): + return str(value) + + return value + + def _escape_reserved_keyword(self, field_name: str) -> str: + """Escape Cypher reserved keywords and problematic field names.""" + # Cypher reserved keywords that need to be escaped + reserved_keywords = { + "code", + "data", + "type", + "name", + "value", + "id", + "count", + "size", + "match", + "where", + "return", + "create", + "delete", + "set", + "remove", + "merge", + "order", + "by", + "limit", + "skip", + "with", + "union", + "all", + "distinct", + "optional", + "foreach", + "case", + "when", + "then", + "else", + "end", + "and", + "or", + "xor", + "not", + "in", + "starts", + "ends", + "contains", + "is", + "null", + "unique", + "index", + "on", + "drop", + "constraint", + "assert", + "scan", + "using", + "join", + "start", + "node", + "relationship", + "rel", + "shortestpath", + 
"allshortestpaths", + "extract", + "filter", + "reduce", + "any", + "none", + "single", + "true", + "false", + "load", + "csv", + "from", + "as", + "into", + "to", + "explain", + "profile", + "call", + "yield", + "periodic", + "commit", + "transaction", + "begin", + "rollback", + "show", + "create", + "drop", + "exists", + "labels", + "keys", + "nodes", + "relationships", + "procedures", + "functions", + "database", + "databases", + "default", + "user", + "users", + "role", + "roles", + "privilege", + "privileges", + "grant", + "deny", + "revoke", + "catalog", + "schema", + "schemas", + } + + # Always escape field names with spaces or special characters + if ( + " " in field_name + or any(char in field_name for char in ["-", ".", "@", "#", "$", "%"]) + or field_name.lower() in reserved_keywords + ): + return f"`{field_name}`" + return field_name diff --git a/integrations/agents/database_analyzer.py b/integrations/agents/database_analyzer.py index 8af6d76..62db5cc 100644 --- a/integrations/agents/database_analyzer.py +++ b/integrations/agents/database_analyzer.py @@ -121,29 +121,145 @@ def get_table_data( cursor.close() return data + def is_join_table( + self, + table_name: str, + schema: List[Dict[str, Any]], + foreign_keys: List[Dict[str, str]], + ) -> bool: + """Determine if a table is a join table (many-to-many).""" + # A join table typically has: + # 1. Only foreign key columns (and maybe an ID or timestamp) + # 2. At least 2 foreign keys + # 3. 
Small number of total columns + + if len(foreign_keys) < 2: + return False + + # Count non-FK columns (excluding common metadata columns) + non_fk_columns = [] + fk_column_names = {fk["column"] for fk in foreign_keys} + metadata_columns = [ + "id", + "created_at", + "updated_at", + "created_on", + "updated_on", + "timestamp", + ] + + for col in schema: + field_name = col["field"].lower() + if ( + col["field"] not in fk_column_names + and field_name not in metadata_columns + ): + non_fk_columns.append(col["field"]) + + # If most columns are foreign keys, it's likely a join table + total_columns = len(schema) + fk_ratio = len(foreign_keys) / total_columns + + # Consider it a join table if: + # - At least 2 FKs and FK ratio > 0.5, OR + # - All columns are FKs or metadata columns + return (len(foreign_keys) >= 2 and fk_ratio > 0.5) or len(non_fk_columns) == 0 + + def get_table_type(self, table_name: str) -> str: + """Determine the type of table: 'entity', 'join', or 'lookup'.""" + schema = self.get_table_schema(table_name) + foreign_keys = self.get_foreign_keys(table_name) + + if self.is_join_table(table_name, schema, foreign_keys): + return "join" + elif len(foreign_keys) == 0: + return "entity" # Pure entity table with no references + else: + return "entity" # Entity table with references + def get_database_structure(self) -> Dict[str, Any]: """Get complete database structure including tables, schemas, and relationships.""" - structure = {"tables": {}, "relationships": []} + structure = { + "tables": {}, + "relationships": [], + "join_tables": {}, + "entity_tables": {}, + } tables = self.get_tables() + # First pass: categorize tables and collect basic info for table in tables: + schema = self.get_table_schema(table) + foreign_keys = self.get_foreign_keys(table) + table_type = self.get_table_type(table) + structure["tables"][table] = { - "schema": self.get_table_schema(table), - "foreign_keys": self.get_foreign_keys(table), + "schema": schema, + "foreign_keys": 
foreign_keys, + "type": table_type, + "row_count": self.get_table_row_count(table), } - # Add relationships - for fk in structure["tables"][table]["foreign_keys"]: - structure["relationships"].append( - { - "from_table": table, - "from_column": fk["column"], - "to_table": fk["referenced_table"], - "to_column": fk["referenced_column"], - } - ) + if table_type == "join": + structure["join_tables"][table] = structure["tables"][table] + else: + structure["entity_tables"][table] = structure["tables"][table] + + # Second pass: create relationships + for table_name, table_info in structure["tables"].items(): + if table_info["type"] == "join": + # Handle join tables as many-to-many relationships + fks = table_info["foreign_keys"] + if len(fks) >= 2: + # Create a many-to-many relationship + # For now, take first two FKs as the main relationship + fk1, fk2 = fks[0], fks[1] + + # Get additional properties from non-FK columns + fk_columns = {fk["column"] for fk in fks} + additional_properties = [] + metadata_columns = [ + "id", + "created_at", + "updated_at", + "created_on", + "updated_on", + "timestamp", + ] + for col in table_info["schema"]: + if ( + col["field"] not in fk_columns + and col["field"].lower() not in metadata_columns + ): + additional_properties.append(col["field"]) + + structure["relationships"].append( + { + "type": "many_to_many", + "join_table": table_name, + "from_table": fk1["referenced_table"], + "from_column": fk1["referenced_column"], + "to_table": fk2["referenced_table"], + "to_column": fk2["referenced_column"], + "join_from_column": fk1["column"], + "join_to_column": fk2["column"], + "additional_properties": additional_properties, + } + ) + else: + # Handle regular foreign key relationships + for fk in table_info["foreign_keys"]: + structure["relationships"].append( + { + "type": "one_to_many", + "from_table": table_name, + "from_column": fk["column"], + "to_table": fk["referenced_table"], + "to_column": fk["referenced_column"], + } + ) return 
structure diff --git a/integrations/agents/enhanced_example.py b/integrations/agents/enhanced_example.py new file mode 100644 index 0000000..ff9400e --- /dev/null +++ b/integrations/agents/enhanced_example.py @@ -0,0 +1,202 @@ +#!/usr/bin/env python3 +""" +Enhanced MySQL to Memgraph Migration Agent Example + +This example demonstrates the new features: +1. Foreign keys converted to relationships (FK columns removed from nodes) +2. Join tables converted to many-to-many relationships +3. Configurable relationship naming strategies +""" + +import os +import logging +from main import MySQLToMemgraphAgent + +# Configure logging +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) + + +def main(): + """Main function to demonstrate the enhanced migration agent.""" + + # Configuration for different relationship naming strategies + examples = [ + { + "name": "Table-Based Naming Strategy", + "strategy": "table_based", + "description": "Uses table names for relationship labels (default)", + }, + { + "name": "LLM-Based Naming Strategy", + "strategy": "llm", + "description": "Uses LLM to generate meaningful relationship names", + }, + ] + + print("Enhanced MySQL to Memgraph Migration Agent") + print("=" * 50) + print() + + # Display available strategies + print("Available relationship naming strategies:") + for i, example in enumerate(examples, 1): + print(f"{i}. {example['name']}: {example['description']}") + print() + + # Get user choice + while True: + try: + choice = input("Select strategy (1-2) or 'q' to quit: ").strip() + if choice.lower() == "q": + print("Goodbye!") + return + + choice_idx = int(choice) - 1 + if 0 <= choice_idx < len(examples): + selected_strategy = examples[choice_idx] + break + else: + print("Invalid choice. Please select 1-2.") + except ValueError: + print("Invalid input. 
Please enter a number or 'q'.") + + print(f"\nUsing {selected_strategy['name']}") + print("-" * 30) + + # MySQL configuration (from environment or defaults) + mysql_config = { + "host": os.getenv("MYSQL_HOST", "localhost"), + "user": os.getenv("MYSQL_USER", "root"), + "password": os.getenv("MYSQL_PASSWORD", ""), + "database": os.getenv("MYSQL_DATABASE", "sakila"), + "port": int(os.getenv("MYSQL_PORT", "3306")), + } + + # Memgraph configuration + memgraph_config = { + "url": os.getenv("MEMGRAPH_URL", "bolt://localhost:7687"), + "username": os.getenv("MEMGRAPH_USERNAME", ""), + "password": os.getenv("MEMGRAPH_PASSWORD", ""), + "database": os.getenv("MEMGRAPH_DATABASE", "memgraph"), + } + + try: + # Create agent with selected strategy + agent = MySQLToMemgraphAgent( + relationship_naming_strategy=selected_strategy["strategy"] + ) + + print(f"Created migration agent with {selected_strategy['strategy']} strategy") + + # Define migration state + initial_state = { + "mysql_config": mysql_config, + "memgraph_config": memgraph_config, + "database_structure": {}, + "migration_queries": [], + "migration_plan": "", + "current_step": "Initializing", + "errors": [], + "completed_tables": [], + "total_tables": 0, + } + + print("\nStarting migration workflow...") + print("This will:") + print("1. Analyze MySQL schema and detect join tables") + print("2. Generate migration plan with LLM") + print("3. Create Cypher queries with enhanced relationship handling") + print("4. Execute migration to Memgraph") + print("5. 
Verify the migration results") + + # Run the migration workflow + result = agent.workflow.invoke(initial_state) + + # Display results + print("\n" + "=" * 50) + print("MIGRATION RESULTS") + print("=" * 50) + + if result["errors"]: + print("❌ Errors encountered:") + for error in result["errors"]: + print(f" - {error}") + else: + print("✅ Migration completed successfully!") + + print(f"\nCompleted tables: {len(result['completed_tables'])}") + print(f"Total tables: {result['total_tables']}") + + if result.get("database_structure"): + structure = result["database_structure"] + print(f"\nSchema Analysis:") + print(f" - Entity tables: {len(structure.get('entity_tables', {}))}") + print(f" - Join tables: {len(structure.get('join_tables', {}))}") + print(f" - Relationships: {len(structure.get('relationships', []))}") + + # Show join tables that were detected + if structure.get("join_tables"): + print(f"\n🔗 Detected join tables:") + for table_name, table_info in structure["join_tables"].items(): + fk_count = len(table_info.get("foreign_keys", [])) + row_count = table_info.get("row_count", 0) + print(f" - {table_name}: {fk_count} FKs, {row_count} rows") + + # Show relationship types + relationships_by_type = {} + for rel in structure.get("relationships", []): + rel_type = rel["type"] + if rel_type not in relationships_by_type: + relationships_by_type[rel_type] = [] + relationships_by_type[rel_type].append(rel) + + print(f"\n🔗 Relationship breakdown:") + for rel_type, rels in relationships_by_type.items(): + print(f" - {rel_type}: {len(rels)} relationships") + + print(f"\nFinal status: {result['current_step']}") + + except Exception as e: + print(f"❌ Migration failed: {e}") + logging.error(f"Migration error: {e}", exc_info=True) + + +def demo_relationship_naming(): + """Demonstrate different relationship naming strategies.""" + from cypher_generator import CypherGenerator + + print("\n" + "=" * 50) + print("RELATIONSHIP NAMING DEMO") + print("=" * 50) + + # Sample 
relationship data + sample_relationships = [ + ("customer", "order"), + ("film", "actor"), + ("user", "role"), + ("product", "category"), + ("order", "order_item"), + ] + + strategies = ["table_based"] + + for strategy in strategies: + print(f"\n{strategy.upper()} STRATEGY:") + print("-" * 20) + + generator = CypherGenerator(strategy) + + for from_table, to_table in sample_relationships: + rel_name = generator._generate_relationship_type(from_table, to_table) + print(f" {from_table} -> {to_table}: {rel_name}") + + +if __name__ == "__main__": + main() + + # Optionally run the naming demo + run_demo = input("\nWould you like to see the relationship naming demo? (y/n): ") + if run_demo.lower().startswith("y"): + demo_relationship_naming() diff --git a/integrations/agents/main.py b/integrations/agents/main.py index 7296317..485c340 100644 --- a/integrations/agents/main.py +++ b/integrations/agents/main.py @@ -27,11 +27,33 @@ # Load environment variables load_dotenv() -# Setup logging -logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) +def validate_environment_variables(): + """Validate required environment variables.""" + required_vars = { + "OPENAI_API_KEY": "OpenAI API key for migration planning", + "MYSQL_PASSWORD": "MySQL database password", + } + + missing_vars = [] + for var, description in required_vars.items(): + if not os.getenv(var): + missing_vars.append(f"{var} ({description})") + + if missing_vars: + logger.error("Missing required environment variables:") + for var in missing_vars: + logger.error(f" - {var}") + logger.error( + "Please check your .env file and ensure all required variables are set" + ) + return False + + return True + + class MigrationState(TypedDict): """State for the migration workflow.""" @@ -49,13 +71,35 @@ class MigrationState(TypedDict): class MySQLToMemgraphAgent: """Agent for migrating MySQL databases to Memgraph.""" - def __init__(self): - """Initialize the migration agent.""" + def __init__(self, 
relationship_naming_strategy: str = "table_based"): + """Initialize the migration agent. + + Args: + relationship_naming_strategy: Strategy for naming relationships. + - "table_based": Use table names directly (default) + - "llm": Use LLM to generate meaningful names + """ + # Validate environment variables first + if not validate_environment_variables(): + raise ValueError( + "Required environment variables are missing. " + "Please check your .env file." + ) + + openai_api_key = os.getenv("OPENAI_API_KEY") + if not openai_api_key: + raise ValueError("OPENAI_API_KEY environment variable is required") + self.llm = ChatOpenAI( - model="gpt-4o-mini", temperature=0.1, api_key=os.getenv("OPENAI_API_KEY") + model="gpt-4o-mini", temperature=0.1, api_key=openai_api_key ) self.mysql_analyzer = None - self.cypher_generator = CypherGenerator() + self.cypher_generator = CypherGenerator(relationship_naming_strategy) + + # Set LLM for cypher generator if using LLM strategy + if relationship_naming_strategy == "llm": + self.cypher_generator.set_llm(self.llm) + self.memgraph_client = None # Build the workflow graph @@ -121,6 +165,7 @@ def _analyze_mysql_schema(self, state: MigrationState) -> MigrationState: return state + # TODO: This should be human visible and configurable. def _generate_migration_plan(self, state: MigrationState) -> MigrationState: """Generate a migration plan using LLM.""" logger.info("Generating migration plan...") @@ -137,34 +182,34 @@ def _generate_migration_plan(self, state: MigrationState) -> MigrationState: system_message = SystemMessage( content=""" -You are an expert database migration specialist. You need to create a -detailed migration plan for moving data from MySQL to Memgraph (a graph database). + You are an expert database migration specialist. You need to create a + detailed migration plan for moving data from MySQL to Memgraph (a graph database). -Your task is to: -1. Analyze the database structure -2. 
Identify the optimal order for creating nodes and relationships -3. Consider dependencies between tables -4. Suggest any optimizations for graph modeling -5. Identify potential issues or challenges + Your task is to: + 1. Analyze the database structure + 2. Identify the optimal order for creating nodes and relationships + 3. Consider dependencies between tables + 4. Suggest any optimizations for graph modeling + 5. Identify potential issues or challenges -Provide a detailed, step-by-step migration plan. + Provide a detailed, step-by-step migration plan. """ ) human_message = HumanMessage( content=f""" -Create a migration plan for the following MySQL database structure: - -Tables: {context['tables']} -Relationships: {context['relationships']} -Table row counts: {context['table_counts']} - -Please provide a detailed migration plan including: -1. Order of operations -2. Node creation strategy -3. Relationship creation strategy -4. Any potential issues to watch for -5. Estimated timeline considerations + Create a migration plan for the following MySQL database structure: + + Tables: {context['tables']} + Relationships: {context['relationships']} + Table row counts: {context['table_counts']} + + Please provide a detailed migration plan including: + 1. Order of operations + 2. Node creation strategy + 3. Relationship creation strategy + 4. Any potential issues to watch for + 5. 
Estimated timeline considerations """ ) @@ -197,13 +242,20 @@ def _generate_cypher_queries(self, state: MigrationState) -> MigrationState: return state + # TODO: Implement actual validation logic for Cypher queries def _validate_queries(self, state: MigrationState) -> MigrationState: """Validate generated Cypher queries.""" logger.info("Validating Cypher queries...") try: # Initialize Memgraph connection for validation - self.memgraph_client = Memgraph(**state["memgraph_config"]) + config = state["memgraph_config"] + self.memgraph_client = Memgraph( + url=config.get("url"), + username=config.get("username"), + password=config.get("password"), + database=config.get("database"), + ) # Test connection test_query = "MATCH (n) RETURN count(n) as node_count LIMIT 1" @@ -226,6 +278,15 @@ def _execute_migration(self, state: MigrationState) -> MigrationState: structure = state["database_structure"] queries = state["migration_queries"] + # Clear the database first to avoid constraint violations + try: + logger.info("Clearing existing data from Memgraph...") + self.memgraph_client.query("MATCH (n) DETACH DELETE n") + self.memgraph_client.query("DROP CONSTRAINT ON (n) ASSERT exists(n.id)") + logger.info("Database cleared successfully") + except Exception as e: + logger.warning(f"Database clearing failed (might be empty): {e}") + # Execute constraint and index creation queries first constraint_queries = [ q for q in queries if "CONSTRAINT" in q or "INDEX" in q @@ -238,26 +299,25 @@ def _execute_migration(self, state: MigrationState) -> MigrationState: except Exception as e: logger.warning(f"Constraint/Index creation failed: {e}") - # Migrate data for each table - for table_name, table_info in structure["tables"].items(): - logger.info(f"Migrating table: {table_name}") + # Migrate data for entity tables only (not join tables) + for table_name, table_info in structure["entity_tables"].items(): + logger.info(f"Migrating entity table: {table_name}") # Get data from MySQL data = 
self.mysql_analyzer.get_table_data(table_name) if data: - # Prepare data for Cypher + # Prepare data for Cypher (excluding FK columns) prepared_data = self.cypher_generator.prepare_data_for_cypher( - data, table_info["schema"] + data, table_info["schema"], table_info["foreign_keys"] ) # Find the node creation query for this table node_query = None + generator = self.cypher_generator + label = generator._table_name_to_label(table_name) for query in queries: - if ( - f"Create {self.cypher_generator._table_name_to_label(table_name)} nodes" - in query - ): + if f"Create {label} nodes" in query: node_query = query break @@ -278,7 +338,8 @@ def _execute_migration(self, state: MigrationState) -> MigrationState: ) state["completed_tables"].append(table_name) logger.info( - f"Successfully migrated {len(data)} rows from {table_name}" + f"Successfully migrated {len(data)} rows " + f"from {table_name}" ) except Exception as e: logger.error(f"Failed to migrate table {table_name}: {e}") @@ -290,12 +351,76 @@ def _execute_migration(self, state: MigrationState) -> MigrationState: # Create relationships logger.info("Creating relationships...") - relationship_queries = [ - q for q in queries if "CREATE (" in q and ")-[:" in q - ] - for query in relationship_queries: - if query.strip() and not query.startswith("//"): + + # Handle one-to-many relationships (from foreign keys) + for rel in structure["relationships"]: + if rel["type"] == "one_to_many": try: + # Find the relationship query + rel_query = self.cypher_generator.generate_relationship_query( + rel + ) + clean_query = "\n".join( + [ + line + for line in rel_query.split("\n") + if not line.strip().startswith("//") + ] + ).strip() + + self.memgraph_client.query(clean_query) + logger.info( + f"Created one-to-many relationship: " + f"{rel['from_table']} -> {rel['to_table']}" + ) + except Exception as e: + logger.error(f"Failed to create relationship: {e}") + state["errors"].append(f"Relationship creation failed: {e}") + + # Handle 
many-to-many relationships (from join tables) + for rel in structure["relationships"]: + if rel["type"] == "many_to_many": + try: + join_table_name = rel["join_table"] + join_table_info = structure["join_tables"][join_table_name] + + # Get join table data + join_data = self.mysql_analyzer.get_table_data(join_table_name) + + if join_data: + # Prepare join table data + prepared_data = self.cypher_generator.prepare_join_table_data_for_cypher( + join_data, join_table_info["schema"] + ) + + # Generate and execute relationship query + rel_query = ( + self.cypher_generator.generate_relationship_query(rel) + ) + clean_query = "\n".join( + [ + line + for line in rel_query.split("\n") + if not line.strip().startswith("//") + ] + ).strip() + + self.memgraph_client.query( + clean_query, {"data": prepared_data} + ) + logger.info( + f"Created many-to-many relationship: " + f"{rel['from_table']} <-> {rel['to_table']} " + f"via {join_table_name}" + ) + else: + logger.info(f"No data in join table {join_table_name}") + + except Exception as e: + logger.error(f"Failed to create many-to-many relationship: {e}") + state["errors"].append( + f"Many-to-many relationship creation failed: {e}" + ) clean_query = "\n".join( [ line @@ -410,9 +535,66 @@ def migrate( } +def debug_mysql_connection(mysql_config: Dict[str, str]) -> bool: + """Debug MySQL connection specifically.""" + logger.debug("Starting MySQL connection debug...") + + try: + analyzer = MySQLAnalyzer(**mysql_config) + logger.debug(f"Created MySQLAnalyzer with config: {mysql_config}") + + if analyzer.connect(): + logger.debug("✓ MySQL connection successful") + + # Test basic operations + tables = analyzer.get_tables() + logger.debug(f"✓ Found {len(tables)} tables: {tables}") + + if tables: + # Test schema retrieval + first_table = tables[0] + schema = analyzer.get_table_schema(first_table) + logger.debug( + f"✓ Retrieved schema for {first_table}: {len(schema)} columns" + ) + + # Test data retrieval + data = 
analyzer.get_table_data(first_table, limit=5) + logger.debug(f"✓ Retrieved {len(data)} sample rows from {first_table}") + + analyzer.disconnect() + logger.debug("✓ MySQL connection closed successfully") + return True + else: + logger.error("✗ MySQL connection failed") + return False + + except Exception as e: + logger.error(f"✗ MySQL debug failed: {e}", exc_info=True) + return False + + def main(): """Main function to run the migration agent.""" + print("MySQL to Memgraph Migration Agent") + print("=" * 40) + + # Check environment variables first + if not validate_environment_variables(): + print("\n❌ Setup Error: Missing required environment variables") + print("\nPlease ensure you have:") + print("1. Created a .env file (copy from .env.example)") + print("2. Set your OPENAI_API_KEY") + print("3. Set your MYSQL_PASSWORD") + print("\nExample .env file:") + print("OPENAI_API_KEY=your_openai_key_here") + print("MYSQL_PASSWORD=your_mysql_password") + print("MYSQL_HOST=localhost") + print("MYSQL_USER=root") + print("MYSQL_DATABASE=sakila") + return + # Example configuration for Sakila database mysql_config = { "host": os.getenv("MYSQL_HOST", "localhost"), @@ -422,26 +604,35 @@ def main(): "port": int(os.getenv("MYSQL_PORT", "3306")), } - print("MySQL to Memgraph Migration Agent") - print("=" * 40) - - # Create and run the agent - agent = MySQLToMemgraphAgent() - result = agent.migrate(mysql_config) + try: + # Create and run the agent + agent = MySQLToMemgraphAgent() + result = agent.migrate(mysql_config) - print(f"\nMigration Result:") - print(f"Success: {result['success']}") - print( - f"Completed Tables: {len(result['completed_tables'])}/{result['total_tables']}" - ) - - if result["errors"]: - print(f"Errors: {len(result['errors'])}") - for error in result["errors"]: - print(f" - {error}") + print(f"\nMigration Result:") + print(f"Success: {result['success']}") + print( + f"Completed Tables: {len(result['completed_tables'])}/{result['total_tables']}" + ) - 
print(f"\nMigration Plan:") - print(result["migration_plan"]) + if result["errors"]: + print(f"Errors: {len(result['errors'])}") + for error in result["errors"]: + print(f" - {error}") + + print(f"\nMigration Plan:") + print(result["migration_plan"]) + + except ValueError as e: + print(f"\n❌ Configuration Error: {e}") + print("\nTroubleshooting steps:") + print("1. Check your .env file exists and contains required variables") + print("2. Verify your OpenAI API key is valid") + print("3. Test MySQL connection with: python mysql_troubleshoot.py") + except Exception as e: + print(f"\n❌ Unexpected Error: {e}") + print("Run with debug mode for more details") + logger.error(f"Unexpected error in main: {e}", exc_info=True) if __name__ == "__main__":