diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index d9aab0b8..5d9ae2e3 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.12.4
+current_version = 0.13.0
 commit = False
 tag = False
 parse = (?P\d+)\.(?P\d+)\.(?P\d+)(-(?P[a-z]+)(?P\d+))?
diff --git a/coverage-badge.svg b/coverage-badge.svg
index 48f99f45..792dcf68 100644
--- a/coverage-badge.svg
+++ b/coverage-badge.svg
@@ -1 +1 @@
-coverage: 30.70%coverage30.70%
+coverage: 30.44%coverage30.44%
diff --git a/coverage.xml b/coverage.xml
index d2c9fc91..daf64bd3 100644
--- a/coverage.xml
+++ b/coverage.xml
[coverage.xml hunks omitted: the regenerated coverage-report XML markup was not captured in this view]
diff --git a/dsg_lib/__init__.py b/dsg_lib/__init__.py
index f141a76d..20aa0748 100644
--- a/dsg_lib/__init__.py
+++ b/dsg_lib/__init__.py
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
-__version__ = '0.12.4'
+__version__ = '0.13.0'
diff --git a/dsg_lib/async_database_functions/database_operations.py b/dsg_lib/async_database_functions/database_operations.py
index a976e5c8..d7db98a0 100644
--- a/dsg_lib/async_database_functions/database_operations.py
+++ b/dsg_lib/async_database_functions/database_operations.py
@@ -1,24 +1,28 @@
 # -*- coding: utf-8 -*-
 """
-This module contains tests for the DatabaseOperations class in the dsg_lib module.
+This module provides the `DatabaseOperations` class for performing CRUD operations on a database using SQLAlchemy's asynchronous session.
-The DatabaseOperations class provides methods for performing CRUD operations on a database using SQLAlchemy's asynchronous session.
+The `DatabaseOperations` class includes the following methods:
-The methods include:
+ - `create_one`: Creates a single record in the database.
+ - `create_many`: Creates multiple records in the database.
+ - `read_one`: Reads a single record from the database.
+ - `read_many`: Reads multiple records from the database.
+ - `update_one`: Updates a single record in the database.
+ - `update_many`: Updates multiple records in the database.
+ - `delete_one`: Deletes a single record from the database.
+ - `delete_many`: Deletes multiple records from the database.
+ - `read_query`: Executes a fetch query on the database and returns a list of records that match the query.
+ - `read_multi_query`: Executes multiple fetch queries on the database and returns a dictionary of results for each query.
+ - `count_query`: Counts the number of records that match a given query.
+ - `get_columns_details`: Gets the details of the columns in a table.
+ - `get_primary_keys`: Gets the primary keys of a table.
+ - `get_table_names`: Gets the names of all tables in the database.
-- `create_one`: Creates a single record in the database.
-- `create_many`: Creates multiple records in the database.
-- `read_one`: Reads a single record from the database.
-- `read_many`: Reads multiple records from the database.
-- `update_one`: Updates a single record in the database.
-- `update_many`: Updates multiple records in the database.
-- `delete_one`: Deletes a single record from the database.
-- `delete_many`: Deletes multiple records from the database.
-- `count_query`: Counts the number of records that match a given query.
-Each method is tested to ensure it performs the expected operation and handles errors correctly. The tests use the pytest-asyncio plugin to run the asynchronous methods in an event loop, and the unittest.mock library to mock the database session and simulate errors.
+Each method is designed to handle errors correctly and provide a simple interface for performing database operations.
-The tests are organized into a single class, TestDatabaseOperations, which contains one test method for each method in the DatabaseOperations class. Each test method follows the Arrange-Act-Assert pattern: it sets up the necessary objects and state (Arrange), calls the method being tested (Act), and checks that the results are as expected (Assert).
+This module also imports the necessary SQLAlchemy and loguru modules, and the `AsyncDatabase` class from the local `async_database` module.
 Author: Mike Ryan
 Date: 2024/05/16
@@ -113,6 +117,8 @@ class DatabaseOperations:
 - `update_many`: Updates multiple records in the database.
 - `delete_one`: Deletes a single record from the database.
 - `delete_many`: Deletes multiple records from the database.
+ - `read_query`: Executes a fetch query on the database and returns a list of records that match the query.
+ - `read_multi_query`: Executes multiple fetch queries on the database and returns a dictionary of results for each query.
 - `count_query`: Counts the number of records that match a given query.
 - `get_column_details`: Gets the details of the columns in a table.
 - `get_primary_keys`: Gets the primary keys of a table.
@@ -120,19 +126,23 @@ class DatabaseOperations:
 Examples:
 ```python
-
+ # Create a DBConfig instance
+ config = {
+     "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared",
+     "echo": False,
+     "future": True,
+     "pool_recycle": 3600,
+ }
+ # create database configuration
+ db_config = database_config.DBConfig(config)
+ # Create an AsyncDatabase instance
+ async_db = async_database.AsyncDatabase(db_config)
+ # Create a DatabaseOperations instance
+ db_ops = database_operations.DatabaseOperations(async_db)
+ # create one record
 data = await db_ops.create_one(User(name='John Doe'))
- data = await db_ops.create_many([User(name='John Doe'), User(name='Jane Doe')])
- data = await db_ops.read_one(User, 1)
- data = await db_ops.read_many(User, [1, 2, 3])
- data = await db_ops.update_one(User, 1, {'name': 'John Smith'})
- data = await db_ops.update_many(User, [1, 2], [{'name': 'John Smith'}, {'name': 'Jane Smith'}])
- data = await db_ops.delete_one(User, 1)
- data = await db_ops.delete_many(User, [1, 2, 3])
- data = await db_ops.count_query(select(User))
- data = await db_ops.get_column_details(User)
- data = await db_ops.get_primary_keys(User)
- data = await db_ops.get_table_names()
+ # read one record
+ record = await db_ops.read_one(User, 1)
 ```
 """
@@ -181,7 +191,8 @@ def __init__(self, async_db: AsyncDatabase):
 self.async_db = async_db
 # Log the successful initialization
- logger.info('DatabaseOperations instance initialized successfully')
+ logger.debug('DatabaseOperations instance initialized successfully')
+
 async def get_columns_details(self, table):
 """
@@ -268,7 +279,7 @@ async def get_columns_details(self, table):
 }
 # Log the successful column retrieval
- logger.info(f'Successfully retrieved columns for table: {table.__name__}')
+ logger.debug(f'Successfully retrieved columns for table: {table.__name__}')
 return columns
 except Exception as ex: # pragma: no cover
@@ -278,6 +289,7 @@ async def get_columns_details(self, table):
 ) # pragma: no cover
 return handle_exceptions(ex) # pragma: no cover
+
 async def get_primary_keys(self, table):
 """
 Retrieves the primary keys of a given table.
@@ -344,7 +356,7 @@ async def get_primary_keys(self, table):
 primary_keys = table.__table__.primary_key.columns.keys()
 # Log the successful primary key retrieval
- logger.info(f'Primary keys retrieved successfully: {primary_keys}')
+ logger.debug(f'Primary keys retrieved successfully: {primary_keys}')
 return primary_keys
@@ -353,6 +365,7 @@ async def get_primary_keys(self, table):
 logger.error(f'Exception occurred: {ex}') # pragma: no cover
 return handle_exceptions(ex) # pragma: no cover
+
 async def get_table_names(self):
 """
 Retrieves the names of all tables in the database.
@@ -410,7 +423,7 @@ async def get_table_names(self):
 table_names = list(self.async_db.Base.metadata.tables.keys())
 # Log the successful table name retrieval
- logger.info(f'Table names retrieved successfully: {table_names}')
+ logger.debug(f'Table names retrieved successfully: {table_names}')
 return table_names
@@ -419,82 +432,6 @@ async def get_table_names(self):
 logger.error(f'Exception occurred: {ex}') # pragma: no cover
 return handle_exceptions(ex) # pragma: no cover
-    async def read_one_record(self, query):
-        """
-        Retrieves a single record from the database based on the provided query.
-
-        This asynchronous method accepts a SQL query object and returns the
-        first record that matches the query. If no record matches the query, it
-        returns None. This method is useful for fetching specific data
-        when the expected result is a single record.
-
-        Parameters:
-            query (Select): An instance of the SQLAlchemy Select class,
-            representing the query to be executed.
-
-        Returns:
-            Result: The first record that matches the query or None if no record matches.
-
-        Raises:
-            Exception: If any error occurs during the database operation.
-
-        Example:
-        ```python
-        from dsg_lib.async_database_functions import (
-            async_database,
-            base_schema,
-            database_config,
-            database_operations,
-        )
-        # Create a DBConfig instance
-        config = {
-            # "database_uri": "postgresql+asyncpg://postgres:postgres@postgresdb/postgres",
-            "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared",
-            "echo": False,
-            "future": True,
-            # "pool_pre_ping": True,
-            # "pool_size": 10,
-            # "max_overflow": 10,
-            "pool_recycle": 3600,
-            # "pool_timeout": 30,
-        }
-        # create database configuration
-        db_config = database_config.DBConfig(config)
-        # Create an AsyncDatabase instance
-        async_db = async_database.AsyncDatabase(db_config)
-        # Create a DatabaseOperations instance
-        db_ops = database_operations.DatabaseOperations(async_db)
-        # read one record
-        record = await db_ops.read_one_record(select(User).where(User.name == 'John Doe'))
-        ```
-        """
-        # Log the start of the operation
-        logger.debug(f'Starting read_one_record operation for {query}')
-
-        try:
-            # Start a new database session
-            async with self.async_db.get_db_session() as session:
-                # Log the start of the record retrieval
-                logger.debug(f'Getting record with query: {query}')
-
-                # Execute the query and retrieve the first record
-                result = await session.execute(query)
-                record = result.scalar_one()
-
-                # Log the successful record retrieval
-                logger.info(f'Record retrieved successfully: {record}')
-
-                return record
-
-        except NoResultFound:
-            # No record was found
-            logger.info('No record found')
-            return None
-
-        except Exception as ex: # pragma: no cover
-            # Handle any exceptions that occur during the record retrieval
-            logger.error(f'Exception occurred: {ex}') # pragma: no cover
-            return handle_exceptions(ex) # pragma: no cover
 async def create_one(self, record):
 """
@@ -558,7 +495,7 @@ async def create_one(self, record):
 await session.commit()
 # Log the successful record addition
- logger.info(f'Record added successfully: {record}')
+ logger.debug(f'Record added successfully: {record}')
 return record
@@ -567,6 +504,7 @@ async def create_one(self, record):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
+
 async def create_many(self, records):
 """
 Adds multiple records to the database.
@@ -642,7 +580,7 @@ async def create_many(self, records):
 # addition
 num_records = len(records)
 t1 = time.time() - t0
- logger.info(
+ logger.debug(
 f'Record operations were successful. {num_records} records were created in {t1:.4f} seconds.'
 )
@@ -653,6 +591,7 @@ async def create_many(self, records):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
+
 async def count_query(self, query):
 """
 Executes a count query on the database and returns the number of records
@@ -717,7 +656,7 @@ async def count_query(self, query):
 count = result.scalar()
 # Log the successful query execution
- logger.info(f'Count query executed successfully. Result: {count}')
+ logger.debug(f'Count query executed successfully. Result: {count}')
 return count
@@ -726,29 +665,25 @@ async def count_query(self, query):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
-    async def read_query(self, query, limit=500, offset=0):
+
+    async def read_one_record(self, query):
 """
-        Executes a fetch query on the database and returns a list of records
-        that match the query.
+        Retrieves a single record from the database based on the provided query.
-        This asynchronous method accepts a SQLAlchemy `Select` query object
-        along with optional limit and offset parameters. It returns a list of
-        records that match the query, with the number of records controlled by
-        the limit, and the starting point of the records determined by the
-        offset.
+        This asynchronous method accepts a SQL query object and returns the
+        first record that matches the query. If no record matches the query, it
+        returns None. This method is useful for fetching specific data
+        when the expected result is a single record.
 Parameters:
-            query (Select): A SQLAlchemy `Select` query object specifying the
-            conditions to fetch records for. limit (int, optional): The maximum
-            number of records to return. Defaults to 500. offset (int,
-            optional): The number of records to skip before starting to return
-            records. Defaults to 0.
+            query (Select): An instance of the SQLAlchemy Select class,
+            representing the query to be executed.
 Returns:
-            list: A list of records that match the query.
+            Result: The first record that matches the query or None if no record matches.
 Raises:
-            Exception: If any error occurs during the execution of the query.
+            Exception: If any error occurs during the database operation.
 Example:
 ```python
@@ -776,8 +711,80 @@ async def read_query(self, query, limit=500, offset=0):
 async_db = async_database.AsyncDatabase(db_config)
 # Create a DatabaseOperations instance
 db_ops = database_operations.DatabaseOperations(async_db)
+        # read one record
+        record = await db_ops.read_one_record(select(User).where(User.name == 'John Doe'))
+        ```
+        """
+        # Log the start of the operation
+        logger.debug(f'Starting read_one_record operation for {query}')
+
+        try:
+            # Start a new database session
+            async with self.async_db.get_db_session() as session:
+                # Log the start of the record retrieval
+                logger.debug(f'Getting record with query: {query}')
+
+                # Execute the query and retrieve the first record
+                result = await session.execute(query)
+                record = result.scalar_one()
+
+                # Log the successful record retrieval
+                logger.debug(f'Record retrieved successfully: {record}')
+
+                return record
+
+        except NoResultFound:
+            # No record was found
+            logger.debug('No record found')
+            return None
+
+        except Exception as ex: # pragma: no cover
+            # Handle any exceptions that occur during the record retrieval
+            logger.error(f'Exception occurred: {ex}') # pragma: no cover
+            return handle_exceptions(ex) # pragma: no cover
+
+
+    async def read_query(self, query):
+        """
+        Executes a fetch query on the database and returns a list of records
+        that match the query.
+
+        This asynchronous method accepts a SQLAlchemy `Select` query object.
+        It returns a list of records that match the query.
+
+        Parameters:
+            query (Select): A SQLAlchemy `Select` query object specifying the
+            conditions to fetch records for.
+
+        Returns:
+            list: A list of records that match the query.
+
+        Raises:
+            Exception: If any error occurs during the execution of the query.
+
+        Example:
+        ```python
+        from dsg_lib.async_database_functions import (
+            async_database,
+            base_schema,
+            database_config,
+            database_operations,
+        )
+        # Create a DBConfig instance
+        config = {
+            "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared",
+            "echo": False,
+            "future": True,
+            "pool_recycle": 3600,
+        }
+        # create database configuration
+        db_config = database_config.DBConfig(config)
+        # Create an AsyncDatabase instance
+        async_db = async_database.AsyncDatabase(db_config)
+        # Create a DatabaseOperations instance
+        db_ops = database_operations.DatabaseOperations(async_db)
 # read query
-        records = await db_ops.read_query(select(User).where(User.age > 30), limit=10)
+        records = await db_ops.read_query(select(User).where(User.age > 30))
 ```
 """
 # Log the start of the operation
@@ -788,11 +795,11 @@ async def read_query(self, query, limit=500, offset=0):
 async with self.async_db.get_db_session() as session:
 # Log the query being executed
 logger.debug(
- f'Executing fetch query: {query} with limit: {limit} and offset: {offset}'
+ f'Executing fetch query: {query}'
 )
 # Execute the fetch query and retrieve the records
- result = await session.execute(query.limit(limit).offset(offset))
+ result = await session.execute(query)
 records = result.scalars().all()
 logger.debug(f'read_query result: {records}')
 # Log the successful query execution
@@ -808,7 +815,7 @@ async def read_query(self, query, limit=500, offset=0):
 # Otherwise, try to convert the records to dictionaries using the __dict__ attribute
 records_data = [record.__dict__ for record in records]
- logger.info(f'Fetch query executed successfully. Records: {records_data}')
+ logger.debug(f'Fetch query executed successfully. Records: {records_data}')
 return records
@@ -817,23 +824,20 @@ async def read_query(self, query, limit=500, offset=0):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
-    async def read_multi_query(self, queries: Dict[str, str], limit=500, offset=0):
+
+    async def read_multi_query(self, queries: Dict[str, str]):
 """
 Executes multiple fetch queries on the database and returns a dictionary of results for each query.
 This asynchronous method takes a dictionary where each key is a query
-        name and each value is a SQLAlchemy `Select` query object. It also
-        accepts optional limit and offset parameters. The method executes each
+        name and each value is a SQLAlchemy `Select` query object. The method executes each
 query and returns a dictionary where each key is the query name, and the corresponding value is a list of records that match that query.
 Parameters:
 queries (Dict[str, Select]): A dictionary of SQLAlchemy `Select`
-            query objects. limit (int, optional): The maximum number of records
-            to return for each query. Defaults to 500. offset (int, optional):
-            The number of records to skip before returning records for each
-            query. Defaults to 0.
+            query objects.
 Returns:
 dict: A dictionary where each key is a query name and each value is
@@ -852,15 +856,10 @@ async def read_multi_query(self, queries: Dict[str, str], limit=500, offset=0):
 )
 # Create a DBConfig instance
 config = {
- # "database_uri": "postgresql+asyncpg://postgres:postgres@postgresdb/postgres",
 "database_uri": "sqlite+aiosqlite:///:memory:?cache=shared",
 "echo": False,
 "future": True,
- # "pool_pre_ping": True,
- # "pool_size": 10,
- # "max_overflow": 10,
 "pool_recycle": 3600,
- # "pool_timeout": 30,
 }
 # create database configuration
 db_config = database_config.DBConfig(config)
@@ -873,7 +872,7 @@ async def read_multi_query(self, queries: Dict[str, str], limit=500, offset=0):
 "query1": select(User).where(User.age > 30),
 "query2": select(User).where(User.age < 20),
 }
- results = await db_ops.read_multi_query(queries, limit=10)
+ results = await db_ops.read_multi_query(queries)
 ```
 """
 # Log the start of the operation
@@ -888,18 +887,13 @@ async def read_multi_query(self, queries: Dict[str, str], limit=500, offset=0):
 logger.debug(f'Executing fetch query: {query}')
 # Execute the fetch query and retrieve the records
- result = await session.execute(query.limit(limit).offset(offset))
+ result = await session.execute(query)
 data = result.scalars().all()
 # Convert the records to dictionaries for logging
 data_dicts = [record.__dict__ for record in data]
 logger.debug(f"Fetch result for query '{query_name}': {data_dicts}")
- # Log the successful query execution
- logger.info(
- f'Fetch query executed successfully: {query_name} with {len(data)} records'
- )
-
 # Store the records in the results dictionary
 results[query_name] = data
 return results
@@ -909,6 +903,7 @@ async def read_multi_query(self, queries: Dict[str, str], limit=500, offset=0):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
+
 async def update_one(self, table, record_id: str, new_values: dict):
 """
 Updates a single record in the database identified by its ID.
@@ -994,7 +989,7 @@ async def update_one(self, table, record_id: str, new_values: dict):
 await session.commit()
 # Log the successful record update
- logger.info(f'Record updated successfully: {record.pkid}')
+ logger.debug(f'Record updated successfully: {record.pkid}')
 return record
 except Exception as ex:
@@ -1002,6 +997,7 @@ async def update_one(self, table, record_id: str, new_values: dict):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
+
 async def delete_one(self, table, record_id: str):
 """
 Deletes a single record from the database based on the provided table
@@ -1094,7 +1090,7 @@ async def delete_one(self, table, record_id: str):
 await session.commit()
 # Log the successful record deletion
- logger.info(f'Record deleted successfully: {record_id}')
+ logger.debug(f'Record deleted successfully: {record_id}')
 return {'success': 'Record deleted successfully'}
@@ -1103,6 +1099,7 @@ async def delete_one(self, table, record_id: str):
 logger.error(f'Exception occurred: {ex}')
 return handle_exceptions(ex)
+
 async def delete_many(
 self,
 table: Type[DeclarativeMeta],
@@ -1176,7 +1173,7 @@ async def delete_many(
 # Calculate the operation time and log the successful record deletion
 t1 = time.time() - t0
- logger.info(
+ logger.debug(
 f'Record operations were successful. {deleted_count} records were deleted in {t1:.4f} seconds.'
 )
diff --git a/dsg_lib/common_functions/email_validation.py b/dsg_lib/common_functions/email_validation.py
index 6883e1f7..b5ee6b1a 100644
--- a/dsg_lib/common_functions/email_validation.py
+++ b/dsg_lib/common_functions/email_validation.py
@@ -159,8 +159,7 @@ def validate_email_address(
 email_dict["email_data"] = dict(sorted(vars(emailinfo).items()))
 email_dict["parameters"]=dict(sorted(locals().items()))
- # Print and return the dictionary
- print(email_dict)
+ # return the dictionary
 return email_dict
 # Handle EmailUndeliverableError
diff --git a/pyproject.toml b/pyproject.toml
index 215bf465..7d9889f6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,9 +8,22 @@ build-backend = "hatchling.build"
 [project]
 name = "devsetgo_lib"
-version = "0.12.4"
+version = "0.13.0"
 requires-python = ">=3.9"
-description = "DevSetGo Common Library provides reusable Python functions for enhanced code efficiency. It includes utilities for file operations, calendar, pattern matching, logging, FastAPI endpoints, and async database handling with CRUD operations."
+description = """
+The devsetgo_lib is a comprehensive Python library that provides a collection of reusable functions designed to increase coding efficiency and enhance code reusability across multiple applications. This library aims to save developers time and effort by reducing the need for repetitive code, allowing defects to be addressed quickly and the fixes propagated across projects. The key features of devsetgo_lib include:
+
+1. **File Operations**: Functions for reading, writing, and managing CSV, JSON, and text files, as well as directory handling operations.
+2. **Calendar Utilities**: Functions for handling dates and times, including converting between month names and numbers.
+3. **Pattern Matching**: Functions for matching and manipulating strings using regular expressions, helping to simplify text processing tasks.
+4. **Logging**: Advanced logging configuration and management using the loguru library, allowing for customizable and robust logging solutions.
+5. **FastAPI Endpoints**: Functions for creating and managing endpoints in FastAPI applications, including system health checks and HTTP response code generation.
+6. **Async Database Handling**: Asynchronous CRUD operations for databases, with support for various databases including SQLite and PostgreSQL.
+7. **Email Validation**: Functions to validate and handle email addresses, ensuring data integrity and correctness in applications.
+
+The devsetgo_lib is designed to be easy to use and versatile, making it a valuable tool for any Python developer looking to improve their workflow and maintain high-quality code across their projects.
+""" +keywords = ["python", "library", "reusable functions", "file operations", "calendar utilities", "pattern matching", "logging", "loguru", "FastAPI", "async database", "CRUD operations", "email validation", "development tools"] readme = "README.md" authors = [{ name = "Mike Ryan", email = "mikeryan56@gmail.com" }] maintainers = [ diff --git a/requirements.txt b/requirements.txt index 8656a961..40b84259 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,41 +1,40 @@ -aiomysql==0.2.0 # Vulnerabilities: None -aiosqlite==0.20.0 # Vulnerabilities: None -asyncpg==0.29.0 # Vulnerabilities: None -autoflake==2.3.1 # Vulnerabilities: None -autopep8==2.1.0 # Vulnerabilities: None -black==24.4.2 # From 24.4.0 | Vulnerabilities: None -bump2version==1.0.1 # Vulnerabilities: None -Click==8.1.7 # Vulnerabilities: None -cx_Oracle==8.3.0 # Vulnerabilities: None -fastapi[all]==0.111.0 # From 0.110.2 | Vulnerabilities: None -flake8==7.0.0 # Vulnerabilities: None -genbadge[all]==1.1.1 # Vulnerabilities: None -hatchling==1.24.2 # Vulnerabilities: None -loguru==0.7.2 # Vulnerabilities: None -mkdocs-material==9.5.23 # From 9.5.18 | Vulnerabilities: None -mkdocs-print-site-plugin==2.4.1 # From 2.4.0 | Vulnerabilities: None -mkdocstrings[python,shell]==0.25.1 # From 0.24.3 | Vulnerabilities: None - -packaging==24.0 # Vulnerabilities: None -pre-commit==3.7.1 # From 3.7.0 | Vulnerabilities: None -psycopg2==2.9.9 # Vulnerabilities: None -Pygments==2.18.0 # From 2.17.2 | Vulnerabilities: None -pylint==3.2.0 # From 3.1.0 | Vulnerabilities: None -pymdown-extensions==10.8.1 # From 10.8 | Vulnerabilities: None -pytest==8.2.0 # From 8.1.1 | Vulnerabilities: None -pytest-asyncio==0.23.6 # Vulnerabilities: None -pytest-cov==5.0.0 # Vulnerabilities: None -pytest-mock==3.14.0 # Vulnerabilities: None -pytest-runner==6.0.1 # Vulnerabilities: None -pytest-xdist==3.6.1 # From 3.5.0 | Vulnerabilities: None -pytz==2024.1 # Vulnerabilities: None -pyyaml==6.0.1 # Vulnerabilities: None -ruff==0.4.4 # From 0.4.1 | Vulnerabilities: None -SQLAlchemy==2.0.30 # From 2.0.29 | Vulnerabilities: None -toml==0.10.2 # Vulnerabilities: None -tox==4.15.0 # From 4.14.2 | Vulnerabilities: None -tqdm==4.66.4 # From 4.66.2 | Vulnerabilities: None -twine==5.1.0 # From 5.0.0 | Vulnerabilities: None -watchdog==4.0.0 # Vulnerabilities: None -wheel==0.43.0 # Vulnerabilities: None -xmltodict==0.13.0 # Vulnerabilities: None +aiomysql==0.2.0 # Vulnerabilities: None +aiosqlite==0.20.0 # Vulnerabilities: None +asyncpg==0.29.0 # Vulnerabilities: None +autoflake==2.3.1 # Vulnerabilities: None +autopep8==2.1.1 # From 2.1.0 | Vulnerabilities: None +black==24.4.2 # Vulnerabilities: None +bump2version==1.0.1 # Vulnerabilities: None +click==8.1.7 # Vulnerabilities: None +cx-Oracle==8.3.0 # Vulnerabilities: None +fastapi[all]==0.111.0 # Vulnerabilities: None +flake8==7.0.0 # Vulnerabilities: None +genbadge[all]==1.1.1 # Vulnerabilities: None +hatchling==1.24.2 # Vulnerabilities: None +loguru==0.7.2 # Vulnerabilities: None +mkdocs-material==9.5.24 # From 9.5.23 | Vulnerabilities: None +mkdocs-print-site-plugin==2.4.1 # Vulnerabilities: None +mkdocstrings[python,shell]==0.25.1 # Vulnerabilities: None +packaging==24.0 # Vulnerabilities: None +pre-commit==3.7.1 # Vulnerabilities: None +psycopg2==2.9.9 # Vulnerabilities: None +Pygments==2.18.0 # Vulnerabilities: None +pylint==3.2.2 # From 3.2.0 | Vulnerabilities: None +pymdown-extensions==10.8.1 # Vulnerabilities: None +pytest==8.2.1 # From 8.2.0 | Vulnerabilities: None +pytest-asyncio==0.23.7 # From 0.23.6 | 
Vulnerabilities: None +pytest-cov==5.0.0 # Vulnerabilities: None +pytest-mock==3.14.0 # Vulnerabilities: None +pytest-runner==6.0.1 # Vulnerabilities: None +pytest-xdist==3.6.1 # Vulnerabilities: None +pytz==2024.1 # Vulnerabilities: None +PyYAML==6.0.1 # Vulnerabilities: None +ruff==0.4.5 # From 0.4.4 | Vulnerabilities: None +SQLAlchemy==2.0.30 # Vulnerabilities: None +toml==0.10.2 # Vulnerabilities: None +tox==4.15.0 # Vulnerabilities: None +tqdm==4.66.4 # Vulnerabilities: None +twine==5.1.0 # Vulnerabilities: None +watchdog==4.0.1 # From 4.0.0 | Vulnerabilities: None +wheel==0.43.0 # Vulnerabilities: None +xmltodict==0.13.0 # Vulnerabilities: None
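
Note on the `read_query` / `read_multi_query` change in the `database_operations.py` hunks above: the `limit` and `offset` parameters are gone and the query is now executed exactly as passed, so paging belongs on the SQLAlchemy `Select` itself. A rough migration sketch follows; it assumes a `User` model and a `db_ops` instance wired up as in the docstring examples in the diff, and it is a fragment meant to run inside an async function.

```python
from sqlalchemy import select

# Before 0.13.0 the helper paginated for you:
#   records = await db_ops.read_query(select(User).where(User.age > 30), limit=10, offset=20)

# From 0.13.0 the Select object carries its own paging:
records = await db_ops.read_query(
    select(User).where(User.age > 30).limit(10).offset(20)
)

# read_multi_query follows the same pattern: put limit/offset on each query.
queries = {
    "adults": select(User).where(User.age > 30).limit(10),
    "minors": select(User).where(User.age < 20).limit(10),
}
results = await db_ops.read_multi_query(queries)
```

Callers that relied on the old implicit default of 500 rows now receive every matching row unless the query itself limits the result set.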
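The per-operation log calls in `database_operations.py` also moved from `logger.info` to `logger.debug`. An application that filters loguru output at INFO or above will no longer see those messages; lowering the sink level to DEBUG brings them back. This is ordinary loguru configuration, not an API introduced by this changeset.

```python
import sys

from loguru import logger

logger.remove()                        # clear existing sinks (e.g. one configured at INFO)
logger.add(sys.stderr, level="DEBUG")  # re-add a sink that passes the DEBUG records used in this release
```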
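Similarly, `validate_email_address` no longer prints its result; the caller now decides what to do with the returned dictionary. A minimal sketch, assuming the address to check is the first positional argument (the full parameter list is not shown in this diff, so verify it against `dsg_lib.common_functions.email_validation`):

```python
from loguru import logger

from dsg_lib.common_functions.email_validation import validate_email_address

# Assumed call shape; check the real signature before relying on it.
email_dict = validate_email_address("test@example.com")
logger.info(f"email validation result: {email_dict}")
```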