From 435b25059bb0fc4fbab62fd8c693e9b00d35077f Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 13:24:23 +1100 Subject: [PATCH 001/181] feat: data API (read from previous PRs and commits before dev reset) --- backend/apps/api/v1/admin.py | 16 +- .../0054_alter_organization_area.py | 25 + ...ey_alter_dictionarykey_options_and_more.py | 46 ++ backend/apps/api/v1/models.py | 81 ++- backend/apps/api/v1/schemas.py | 2 + backend/apps/api/v1/search_indexes.py | 5 + backend/apps/api/v1/search_views.py | 7 +- backend/apps/data_api/__init__.py | 0 backend/apps/data_api/admin.py | 235 +++++++++ backend/apps/data_api/apps.py | 8 + backend/apps/data_api/decorators.py | 53 ++ backend/apps/data_api/graphql.py | 133 +++++ .../apps/data_api/migrations/0001_initial.py | 361 +++++++++++++ backend/apps/data_api/migrations/__init__.py | 0 backend/apps/data_api/models.py | 296 +++++++++++ backend/apps/data_api/translation.py | 22 + backend/apps/data_api/urls.py | 44 ++ backend/apps/data_api/views.py | 477 ++++++++++++++++++ backend/apps/schema.py | 2 +- backend/settings/base.py | 1 + backend/urls.py | 1 + 21 files changed, 1776 insertions(+), 39 deletions(-) create mode 100644 backend/apps/api/v1/migrations/0054_alter_organization_area.py create mode 100644 backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py create mode 100644 backend/apps/data_api/__init__.py create mode 100644 backend/apps/data_api/admin.py create mode 100644 backend/apps/data_api/apps.py create mode 100644 backend/apps/data_api/decorators.py create mode 100644 backend/apps/data_api/graphql.py create mode 100644 backend/apps/data_api/migrations/0001_initial.py create mode 100644 backend/apps/data_api/migrations/__init__.py create mode 100644 backend/apps/data_api/models.py create mode 100644 backend/apps/data_api/translation.py create mode 100644 backend/apps/data_api/urls.py create mode 100644 backend/apps/data_api/views.py diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index 339dae66..8700ca18 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -52,10 +52,10 @@ Dataset, DateTimeRange, Dictionary, + DictionaryKey, Entity, EntityCategory, InformationRequest, - Key, Language, License, MeasurementUnit, @@ -615,6 +615,7 @@ class DatasetAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "contains_raw_data_sources", "contains_information_requests", "contains_closed_data", + "contains_data_api_endpoint_tables", "page_views", "created_at", "updated_at", @@ -718,6 +719,7 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "dataset", "get_publishers", "get_data_cleaners", + "is_data_api_endpoint", "created_at", "updated_at", ] @@ -961,7 +963,7 @@ def lookups(self, request, model_admin): ("column", "Column"), ("raw_data_source", "Raw Data Source"), ("information_request", "Information Request"), - ("key", "Key"), + ("dictionary_key", "Dictionary Key"), ) def queryset(self, request, queryset): @@ -973,8 +975,8 @@ def queryset(self, request, queryset): return queryset.filter(raw_data_source__isnull=False) if self.value() == "information_request": return queryset.filter(information_request__isnull=False) - if self.value() == "key": - return queryset.filter(key__isnull=False) + if self.value() == "dictionary_key": + return queryset.filter(dictionary_key__isnull=False) class UnitsInline(admin.TabularInline): @@ -1233,7 +1235,7 @@ class AnalysisAdmin(TabbedTranslationAdmin): filter_horizontal = ["datasets", 
"themes", "tags"] -class KeyAdmin(admin.ModelAdmin): +class DictionaryKeyAdmin(admin.ModelAdmin): readonly_fields = [ "id", ] @@ -1267,7 +1269,7 @@ class QualityCheckAdmin(TabbedTranslationAdmin): "dataset", "table", "column", - "key", + "dictionary_key", "raw_data_source", "information_request", ] @@ -1324,10 +1326,10 @@ class PipelineAdmin(admin.ModelAdmin): admin.site.register(Dataset, DatasetAdmin) admin.site.register(DateTimeRange, DateTimeRangeAdmin) admin.site.register(Dictionary) +admin.site.register(DictionaryKey, DictionaryKeyAdmin) admin.site.register(Entity, EntityAdmin) admin.site.register(EntityCategory, EntityCategoryAdmin) admin.site.register(InformationRequest, InformationRequestAdmin) -admin.site.register(Key, KeyAdmin) admin.site.register(Language, LanguageAdmin) admin.site.register(License, LicenseAdmin) admin.site.register(MeasurementUnit, MeasurementUnitAdmin) diff --git a/backend/apps/api/v1/migrations/0054_alter_organization_area.py b/backend/apps/api/v1/migrations/0054_alter_organization_area.py new file mode 100644 index 00000000..9e2a2d30 --- /dev/null +++ b/backend/apps/api/v1/migrations/0054_alter_organization_area.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 4.2.19 on 2025-02-22 01:20 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("v1", "0053_rename_required_requires"), + ] + + operations = [ + migrations.AlterField( + model_name="organization", + name="area", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="organizations", + to="v1.area", + ), + ), + ] diff --git a/backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py b/backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py new file mode 100644 index 00000000..e0c7c7dd --- /dev/null +++ b/backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Generated by Django 4.2.19 on 2025-02-22 01:43 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("v1", "0054_alter_organization_area"), + ] + + operations = [ + migrations.RenameModel( + old_name="Key", + new_name="DictionaryKey", + ), + migrations.AlterModelOptions( + name="dictionarykey", + options={ + "ordering": ["name"], + "verbose_name": "Dictionary Key", + "verbose_name_plural": "Dictionary Keys", + }, + ), + migrations.RenameField( + model_name="coverage", + old_name="key", + new_name="dictionary_key", + ), + migrations.RenameField( + model_name="qualitycheck", + old_name="key", + new_name="dictionary_key", + ), + migrations.AddField( + model_name="table", + name="is_data_api_endpoint", + field=models.BooleanField( + default=False, help_text="Table is served as an endpoint in the Data API app." 
+ ), + ), + migrations.AlterModelTable( + name="dictionarykey", + table="dictionary_key", + ), + ] diff --git a/backend/apps/api/v1/models.py b/backend/apps/api/v1/models.py index 71cc7671..400cfbb9 100644 --- a/backend/apps/api/v1/models.py +++ b/backend/apps/api/v1/models.py @@ -131,8 +131,8 @@ class Coverage(BaseModel): on_delete=models.CASCADE, related_name="coverages", ) - key = models.ForeignKey( - "Key", + dictionary_key = models.ForeignKey( + "DictionaryKey", blank=True, null=True, on_delete=models.CASCADE, @@ -175,8 +175,8 @@ def __str__(self): return f"Raw data source: {self.raw_data_source} - {self.area}" if self.coverage_type() == "information_request": return f"Information request: {self.information_request} - {self.area}" - if self.coverage_type() == "key": - return f"Key: {self.key} - {self.area}" + if self.coverage_type() == "dictionary_key": + return f"Dictionary key: {self.dictionary_key} - {self.area}" if self.coverage_type() == "analysis": return f"Analysis: {self.analysis} - {self.area}" return str(self.id) @@ -196,8 +196,8 @@ def coverage_type(self): return "raw_data_source" if self.information_request: return "information_request" - if self.key: - return "key" + if self.dictionary_key: + return "dictionary_key" if self.analysis: return "analysis" return "" @@ -225,7 +225,7 @@ def get_similarity_of_datetime(self, other: "Coverage"): def clean(self) -> None: """ Assert that only one of "table", "raw_data_source", - "information_request", "column" or "key" is set + "information_request", "column" or "dictionary_key" is set """ count = 0 if self.table: @@ -238,14 +238,14 @@ def clean(self) -> None: count += 1 if self.information_request: count += 1 - if self.key: + if self.dictionary_key: count += 1 if self.analysis: count += 1 if count != 1: raise ValidationError( "One and only one of 'table', 'raw_data_source', " - "'information_request', 'column', 'key', 'analysis' must be set." + "'information_request', 'column', 'dictionary_key', 'analysis' must be set." 
) @@ -271,9 +271,9 @@ class Meta: ordering = ["slug"] -class Key(BaseModel): +class DictionaryKey(BaseModel): """ - Key model + DictionaryKey model Sets a name and a value of a dictionary key """ @@ -288,11 +288,11 @@ def __str__(self): return str(self.name) class Meta: - """Meta definition for Key.""" + """Meta definition for DictionaryKey.""" - db_table = "keys" - verbose_name = "Key" - verbose_name_plural = "Keys" + db_table = "dictionary_key" + verbose_name = "Dictionary Key" + verbose_name_plural = "Dictionary Keys" ordering = ["name"] @@ -670,7 +670,8 @@ def contains_open_data(self): @property def contains_closed_data(self): - """Returns true if there are tables or columns with closed coverages, or if the uncompressed file size is above 1 GB""" + """Returns true if there are tables or columns with closed coverages, + or if the uncompressed file size is above 1 GB""" for table in ( self.tables.exclude(status__slug="under_review") .exclude(slug__in=["dicionario", "dictionary"]) @@ -773,7 +774,12 @@ def first_information_request_id(self): @property def table_last_updated_at(self): updates = [ - u.last_updated_at for u in self.tables.exclude(status__slug="under_review").exclude(slug__in=["dicionario", "dictionary"]).all() + u.last_updated_at + for u in ( + self.tables.exclude(status__slug="under_review") + .exclude(slug__in=["dicionario", "dictionary"]) + .all() + ) if u.last_updated_at ] # fmt: skip return max(updates) if updates else None @@ -781,7 +787,12 @@ def table_last_updated_at(self): @property def raw_data_source_last_polled_at(self): polls = [ - u.last_polled_at for u in self.raw_data_sources.exclude(status__slug="under_review").all() + u.last_polled_at + for u in ( + self.raw_data_sources + .exclude(status__slug="under_review") + .all() + ) if u.last_polled_at ] # fmt: skip return max(polls) if polls else None @@ -789,11 +800,20 @@ def raw_data_source_last_polled_at(self): @property def raw_data_source_last_updated_at(self): updates = [ - u.last_updated_at for u in self.raw_data_sources.exclude(status__slug="under_review").all() + u.last_updated_at + for u in ( + self.raw_data_sources + .exclude(status__slug="under_review") + .all() + ) if u.last_updated_at ] # fmt: skip return max(updates) if updates else None + @property + def contains_data_api_endpoint_tables(self): + return self.tables.filter(is_data_api_endpoint=True).exists() + class Update(BaseModel): id = models.UUIDField(primary_key=True, default=uuid4) @@ -945,7 +965,10 @@ class Table(BaseModel, OrderedModel): ) is_deprecated = models.BooleanField( default=False, - help_text="We stopped maintaining this table for some reason. Examples: raw data deprecated, new version elsewhere, etc.", + help_text=( + "We stopped maintaining this table for some reason. " + "Examples: raw data deprecated, new version elsewhere, etc." + ), ) license = models.ForeignKey( "License", @@ -969,6 +992,9 @@ class Table(BaseModel, OrderedModel): null=True, ) is_directory = models.BooleanField(default=False, blank=True, null=True) + is_data_api_endpoint = models.BooleanField( + default=False, help_text="Table is served as an endpoint in the Data API app." 
+ ) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) published_by = models.ManyToManyField( @@ -996,9 +1022,7 @@ class Table(BaseModel, OrderedModel): compressed_file_size = models.BigIntegerField(blank=True, null=True) number_rows = models.BigIntegerField(blank=True, null=True) number_columns = models.BigIntegerField(blank=True, null=True) - is_closed = models.BooleanField( - default=False, help_text="Table is for BD Pro subscribers only" - ) + is_closed = models.BooleanField(default=False, help_text="Table is for BD Pro subscribers only") page_views = models.BigIntegerField( default=0, help_text="Number of page views by Google Analytics", @@ -2093,8 +2117,8 @@ class QualityCheck(BaseModel): on_delete=models.CASCADE, related_name="quality_checks", ) - key = models.ForeignKey( - "Key", + dictionary_key = models.ForeignKey( + "DictionaryKey", blank=True, null=True, on_delete=models.CASCADE, @@ -2139,7 +2163,7 @@ def clean(self) -> None: count += 1 if self.column: count += 1 - if self.key: + if self.dictionary_key: count += 1 if self.raw_data_source: count += 1 @@ -2148,7 +2172,7 @@ def clean(self) -> None: if count != 1: raise ValidationError( "One and only one of 'analysis', 'dataset, 'table', " - "'column', 'key, 'raw_data_source', 'information_request' must be set." + "'column', 'dictionary_key', 'raw_data_source', 'information_request' must be set." ) return super().clean() @@ -2226,7 +2250,8 @@ def get_full_temporal_coverage(resources: list) -> dict: def get_spatial_coverage(resources: list) -> list: - """Get spatial coverage of resources by returning unique area slugs, keeping only the highest level in each branch + """Get spatial coverage of resources by returning unique area slugs, + keeping only the highest level in each branch For example: - If areas = [br_mg_3100104, br_mg_3100104] -> returns [br_mg_3100104] diff --git a/backend/apps/api/v1/schemas.py b/backend/apps/api/v1/schemas.py index eaa20a66..6c02f760 100644 --- a/backend/apps/api/v1/schemas.py +++ b/backend/apps/api/v1/schemas.py @@ -81,6 +81,8 @@ class Dataset(BaseModel): contains_open_data: bool contains_closed_data: bool # + contains_data_api_endpoint_tables: bool + # themes: List[Theme] organization: List[Organization] temporal_coverage: List[str] diff --git a/backend/apps/api/v1/search_indexes.py b/backend/apps/api/v1/search_indexes.py index 6e256909..1bb47336 100644 --- a/backend/apps/api/v1/search_indexes.py +++ b/backend/apps/api/v1/search_indexes.py @@ -255,6 +255,11 @@ class DatasetIndex(indexes.SearchIndex, indexes.Indexable): indexed=False, ) + contains_data_api_endpoint_tables = indexes.BooleanField( + model_attr="contains_data_api_endpoint_tables", + indexed=False, + ) + n_tables = indexes.IntegerField( model_attr="n_tables", indexed=False, diff --git a/backend/apps/api/v1/search_views.py b/backend/apps/api/v1/search_views.py index c7306cce..56a80e1c 100644 --- a/backend/apps/api/v1/search_views.py +++ b/backend/apps/api/v1/search_views.py @@ -30,6 +30,9 @@ def search(self): # Start with all results sqs = self.searchqueryset.all() + # Filter out datasets that contain data API endpoints + sqs = sqs.exclude(contains_data_api_endpoint_tables=True) + # Debug print to see all form data print( "DEBUG: Form data:", @@ -296,9 +299,7 @@ def as_search_result(result: SearchResult, locale="pt"): ) entities = [] - for slug, name in zip( - result.entity_slug or [], getattr(result, f"entity_name_{locale}") or [] - ): + for slug, name in zip(result.entity_slug or [], 
getattr(result, f"entity_name_{locale}") or []): entities.append( { "slug": slug, diff --git a/backend/apps/data_api/__init__.py b/backend/apps/data_api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/apps/data_api/admin.py b/backend/apps/data_api/admin.py new file mode 100644 index 00000000..421d7eee --- /dev/null +++ b/backend/apps/data_api/admin.py @@ -0,0 +1,235 @@ +# -*- coding: utf-8 -*- +from django.contrib import admin, messages +from django.urls import reverse +from django.utils.html import format_html + +from .models import ( + Credit, + Endpoint, + EndpointCategory, + EndpointParameter, + EndpointPricingTier, + Key, + Request, +) + + +class KeyInline(admin.TabularInline): + model = Key + extra = 0 + readonly_fields = ( + "id", + "name", + "prefix", + "is_active", + "expires_at", + "created_at", + "updated_at", + ) + fields = readonly_fields + can_delete = False + show_change_link = True + + def has_add_permission(self, request, obj=None): + return False + + def has_change_permission(self, request, obj=None): + return False + + +class EndpointParameterInline(admin.TabularInline): + model = EndpointParameter + extra = 0 + readonly_fields = ( + "id", + "name", + "description", + "type", + "is_required", + "column", + "created_at", + "updated_at", + "parameter_actions", + ) + fields = readonly_fields + can_delete = False + + def has_add_permission(self, request, obj=None): + return False + + def has_change_permission(self, request, obj=None): + return False + + def parameter_actions(self, obj): + if not obj.pk: + return "-" + edit_url = reverse( + "admin:data_api_endpointparameter_change", + args=[obj.pk], + ) + return format_html( + 'Edit', + edit_url, + ) + + parameter_actions.short_description = "Actions" + + +class EndpointPricingTierInline(admin.TabularInline): + model = EndpointPricingTier + extra = 0 + readonly_fields = ( + "id", + "min_requests", + "max_requests", + "price_per_request", + "currency", + "created_at", + "updated_at", + "pricing_actions", + ) + fields = readonly_fields + can_delete = False + + def has_add_permission(self, request, obj=None): + return False + + def has_change_permission(self, request, obj=None): + return False + + def pricing_actions(self, obj): + if not obj.pk: + return "-" + edit_url = reverse( + "admin:data_api_endpointpricingtier_change", + args=[obj.pk], + ) + return format_html( + 'Edit', + edit_url, + ) + + pricing_actions.short_description = "Actions" + + +class KeyAdmin(admin.ModelAdmin): + list_display = ( + "name", + "account", + "prefix", + "balance", + "is_active", + "expires_at", + "created_at", + ) + list_filter = ("is_active",) + search_fields = ("name", "prefix", "account__email", "account__full_name") + readonly_fields = ("id", "prefix", "hash", "balance", "created_at", "updated_at") + fieldsets = ( + ( + None, + { + "fields": ( + "name", + "account", + "prefix", + "balance", + "is_active", + "expires_at", + ) + }, + ), + ) + ordering = ["-created_at"] + + def has_add_permission(self, request): + return True + + def save_model(self, request, obj, form, change): + if not change: # Only when creating new object + obj, key = Key.create_key(**form.cleaned_data) + messages.success( + request, + f"API Key generated successfully. 
" + f"Please copy this key now as it won't be shown again: {key}", + ) + else: + super().save_model(request, obj, form, change) + + +class EndpointCategoryAdmin(admin.ModelAdmin): + list_display = ("slug", "name", "description", "dataset") + list_filter = ("slug", "name", "description", "dataset") + search_fields = ("slug", "name", "description", "dataset__name") + readonly_fields = ("id", "created_at", "updated_at") + + +class EndpointParameterAdmin(admin.ModelAdmin): + list_display = ("name", "description", "endpoint", "column") + list_filter = ("name", "endpoint", "is_required") + search_fields = ("name", "description", "endpoint__name", "column__name") + readonly_fields = ("id", "created_at", "updated_at") + + +class EndpointPricingTierAdmin(admin.ModelAdmin): + list_display = ("endpoint", "min_requests", "max_requests", "price_per_request") + list_filter = ("endpoint", "min_requests", "max_requests", "price_per_request") + search_fields = ("endpoint__name", "min_requests", "max_requests", "price_per_request") + readonly_fields = ("id", "created_at", "updated_at") + + +class EndpointAdmin(admin.ModelAdmin): + list_display = ( + "slug", + "name", + "description", + "category", + "table", + "is_active", + "is_deprecated", + ) + list_filter = ("slug", "name", "category", "is_active", "is_deprecated") + search_fields = ("slug", "name", "description", "category__name", "table__name") + readonly_fields = ("id", "created_at", "updated_at", "full_slug", "full_name") + inlines = [EndpointParameterInline, EndpointPricingTierInline] + + +class CreditAdmin(admin.ModelAdmin): + list_display = ("key", "amount", "currency", "created_at") + list_filter = ("key", "currency") + search_fields = ("key__name", "currency__name") + readonly_fields = ("id", "created_at", "updated_at") + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + + +class RequestAdmin(admin.ModelAdmin): + list_display = ("key", "endpoint", "created_at") + list_filter = ("key", "endpoint") + search_fields = ("key__name", "endpoint__name") + readonly_fields = ("id", "created_at", "updated_at") + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + + +admin.site.register(Key, KeyAdmin) +admin.site.register(Endpoint, EndpointAdmin) +admin.site.register(EndpointCategory, EndpointCategoryAdmin) +admin.site.register(EndpointParameter, EndpointParameterAdmin) +admin.site.register(EndpointPricingTier, EndpointPricingTierAdmin) +admin.site.register(Credit, CreditAdmin) +admin.site.register(Request, RequestAdmin) diff --git a/backend/apps/data_api/apps.py b/backend/apps/data_api/apps.py new file mode 100644 index 00000000..c2f98894 --- /dev/null +++ b/backend/apps/data_api/apps.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +from django.apps import AppConfig + + +class DataAPIConfig(AppConfig): + name = "backend.apps.data_api" + verbose_name = "Data API" + default_auto_field = "django.db.models.BigAutoField" diff --git a/backend/apps/data_api/decorators.py b/backend/apps/data_api/decorators.py new file mode 100644 index 00000000..8765320d --- /dev/null +++ b/backend/apps/data_api/decorators.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +import os +from functools import wraps + +import stripe +from django.http import JsonResponse + + +def 
cloud_function_only(view_func): + @wraps(view_func) + def wrapped_view(view_instance, request, *args, **kwargs): + # Get the Cloud Function's secret key from environment variables + cloud_function_key = os.getenv("DATA_API_CLOUD_FUNCTION") + + # Get the authorization header + auth_header = request.headers.get("X-Data-API-Cloud-Function") + + if not auth_header or auth_header != cloud_function_key: + return JsonResponse({"error": "Unauthorized access", "success": False}, status=403) + + return view_func(view_instance, request, *args, **kwargs) + + return wrapped_view + + +def stripe_webhook_only(view_func): + @wraps(view_func) + def wrapped_view(view_instance, request, *args, **kwargs): + # Get the Stripe webhook secret from environment variables + stripe_webhook_secret = os.getenv("DATA_API_STRIPE_WEBHOOK") + + # Get the Stripe signature header + stripe_signature = request.headers.get("X-Data-API-Stripe-Signature") + + if not stripe_signature: + return JsonResponse({"error": "Missing Stripe signature", "success": False}, status=403) + + try: + # Verify the event using the signature and webhook secret + event = stripe.Webhook.construct_event( + request.body, stripe_signature, stripe_webhook_secret + ) + except stripe.error.SignatureVerificationError: + return JsonResponse({"error": "Invalid Stripe signature", "success": False}, status=403) + except Exception: + return JsonResponse({"error": "Invalid webhook request", "success": False}, status=400) + + # Add the verified Stripe event to the request + request.stripe_event = event + + return view_func(view_instance, request, *args, **kwargs) + + return wrapped_view diff --git a/backend/apps/data_api/graphql.py b/backend/apps/data_api/graphql.py new file mode 100644 index 00000000..c9908de0 --- /dev/null +++ b/backend/apps/data_api/graphql.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +from graphene import ObjectType +from graphene_django import DjangoObjectType +from graphene_django.filter import DjangoFilterConnectionField + +from backend.apps.data_api.models import ( + Credit, + Endpoint, + EndpointCategory, + EndpointParameter, + EndpointPricingTier, + Key, + Request, +) +from backend.custom.graphql_base import CountableConnection, PlainTextNode + + +class KeyNode(DjangoObjectType): + class Meta: + model = Key + fields = "__all__" + filter_fields = { + "id": ["exact"], + "name": ["exact", "icontains"], + "is_active": ["exact"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class EndpointNode(DjangoObjectType): + class Meta: + model = Endpoint + fields = "__all__" + filter_fields = { + "id": ["exact"], + "slug": ["exact", "icontains"], + "name": ["exact", "icontains"], + "is_active": ["exact"], + "is_deprecated": ["exact"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class EndpointCategoryNode(DjangoObjectType): + class Meta: + model = EndpointCategory + fields = "__all__" + filter_fields = { + "id": ["exact"], + "slug": ["exact", "icontains"], + "name": ["exact", "icontains"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class EndpointParameterNode(DjangoObjectType): + class Meta: + model = EndpointParameter + fields = "__all__" + filter_fields = { + "id": ["exact"], + "name": ["exact", "icontains"], + "required": ["exact"], + "type": ["exact"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class EndpointPricingTierNode(DjangoObjectType): + class Meta: + model = EndpointPricingTier + fields = 
"__all__" + filter_fields = { + "id": ["exact"], + "name": ["exact", "icontains"], + "price": ["exact", "lt", "lte", "gt", "gte"], + "is_active": ["exact"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class RequestNode(DjangoObjectType): + class Meta: + model = Request + fields = "__all__" + filter_fields = { + "id": ["exact"], + "status": ["exact"], + "created_at": ["exact", "lt", "lte", "gt", "gte"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class CreditNode(DjangoObjectType): + class Meta: + model = Credit + fields = "__all__" + filter_fields = { + "id": ["exact"], + "amount": ["exact", "lt", "lte", "gt", "gte"], + "created_at": ["exact", "lt", "lte", "gt", "gte"], + } + interfaces = (PlainTextNode,) + connection_class = CountableConnection + + +class Query(ObjectType): + key = PlainTextNode.Field(KeyNode) + all_keys = DjangoFilterConnectionField(KeyNode) + + endpoint = PlainTextNode.Field(EndpointNode) + all_endpoints = DjangoFilterConnectionField(EndpointNode) + + endpoint_category = PlainTextNode.Field(EndpointCategoryNode) + all_endpoint_categories = DjangoFilterConnectionField(EndpointCategoryNode) + + endpoint_parameter = PlainTextNode.Field(EndpointParameterNode) + all_endpoint_parameters = DjangoFilterConnectionField(EndpointParameterNode) + + endpoint_pricing_tier = PlainTextNode.Field(EndpointPricingTierNode) + all_endpoint_pricing_tiers = DjangoFilterConnectionField(EndpointPricingTierNode) + + request = PlainTextNode.Field(RequestNode) + all_requests = DjangoFilterConnectionField(RequestNode) + + credit = PlainTextNode.Field(CreditNode) + all_credits = DjangoFilterConnectionField(CreditNode) diff --git a/backend/apps/data_api/migrations/0001_initial.py b/backend/apps/data_api/migrations/0001_initial.py new file mode 100644 index 00000000..ae5e53ea --- /dev/null +++ b/backend/apps/data_api/migrations/0001_initial.py @@ -0,0 +1,361 @@ +# -*- coding: utf-8 -*- +# Generated by Django 4.2.19 on 2025-02-22 01:52 + +import uuid + +import django.core.validators +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + +import backend.apps.data_api.models + + +class Migration(migrations.Migration): + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("v1", "0054_alter_organization_area"), + ] + + operations = [ + migrations.CreateModel( + name="Endpoint", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ("slug", models.SlugField(max_length=100)), + ("name", models.CharField(max_length=100)), + ("name_pt", models.CharField(max_length=100, null=True)), + ("name_en", models.CharField(max_length=100, null=True)), + ("name_es", models.CharField(max_length=100, null=True)), + ("description", models.TextField(blank=True)), + ("description_pt", models.TextField(blank=True, null=True)), + ("description_en", models.TextField(blank=True, null=True)), + ("description_es", models.TextField(blank=True, null=True)), + ("is_active", models.BooleanField(default=True)), + ("is_deprecated", models.BooleanField(default=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ], + options={ + "verbose_name": "Endpoint", + "verbose_name_plural": "Endpoints", + "ordering": ["created_at"], + }, + ), + migrations.CreateModel( + name="Key", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, 
serialize=False)), + ( + "name", + models.CharField( + blank=True, + help_text="A friendly name to identify this key", + max_length=100, + null=True, + ), + ), + ( + "hash", + models.CharField( + blank=True, + help_text="The hashed key", + max_length=64, + null=True, + unique=True, + ), + ), + ( + "prefix", + models.CharField( + blank=True, + help_text="First 8 characters of the key", + max_length=8, + null=True, + unique=True, + ), + ), + ("is_active", models.BooleanField(default=True)), + ( + "balance", + models.DecimalField( + decimal_places=2, + default=0, + help_text="The balance of the key in BRL", + max_digits=12, + ), + ), + ( + "expires_at", + models.DateTimeField( + blank=True, help_text="Optional expiration date", null=True + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "account", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="keys", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "verbose_name": "Key", + "verbose_name_plural": "Keys", + "ordering": ["created_at"], + }, + ), + migrations.CreateModel( + name="Request", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ("parameters", models.JSONField(default=dict)), + ("error_message", models.TextField(blank=True)), + ("response_time", models.FloatField(default=0)), + ("bytes_processed", models.BigIntegerField(default=0)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "endpoint", + models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + related_name="requests", + to="data_api.endpoint", + ), + ), + ( + "key", + models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + related_name="requests", + to="data_api.key", + ), + ), + ], + options={ + "verbose_name": "Request", + "verbose_name_plural": "Requests", + "ordering": ["created_at"], + }, + ), + migrations.CreateModel( + name="EndpointPricingTier", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ( + "min_requests", + models.PositiveIntegerField( + help_text="Minimum number of requests for this tier" + ), + ), + ( + "max_requests", + models.PositiveIntegerField( + blank=True, help_text="Maximum number of requests for this tier", null=True + ), + ), + ( + "price_per_request", + models.DecimalField( + decimal_places=4, + help_text="Price per request", + max_digits=10, + validators=[django.core.validators.MinValueValidator(0)], + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "currency", + models.ForeignKey( + blank=True, + limit_choices_to={"category__slug": "currency"}, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="endpoint_pricing_tiers", + to="v1.measurementunit", + ), + ), + ( + "endpoint", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="pricing_tiers", + to="data_api.endpoint", + ), + ), + ], + options={ + "verbose_name": "Endpoint Pricing Tier", + "verbose_name_plural": "Endpoint Pricing Tiers", + "ordering": ["min_requests"], + }, + ), + migrations.CreateModel( + name="EndpointParameter", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ("name", models.CharField(max_length=100)), + ("name_pt", models.CharField(max_length=100, null=True)), + ("name_en", 
models.CharField(max_length=100, null=True)), + ("name_es", models.CharField(max_length=100, null=True)), + ("description", models.TextField(blank=True)), + ("description_pt", models.TextField(blank=True, null=True)), + ("description_en", models.TextField(blank=True, null=True)), + ("description_es", models.TextField(blank=True, null=True)), + ("is_required", models.BooleanField(default=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "column", + models.ForeignKey( + blank=True, + limit_choices_to=backend.apps.data_api.models.limit_column_choices, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="parameters", + to="v1.column", + ), + ), + ( + "endpoint", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="parameters", + to="data_api.endpoint", + ), + ), + ( + "type", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="parameters", + to="v1.bigquerytype", + ), + ), + ], + options={ + "verbose_name": "Endpoint Parameter", + "verbose_name_plural": "Endpoint Parameters", + }, + ), + migrations.CreateModel( + name="EndpointCategory", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ("slug", models.SlugField(max_length=100)), + ("name", models.CharField(max_length=100)), + ("name_pt", models.CharField(max_length=100, null=True)), + ("name_en", models.CharField(max_length=100, null=True)), + ("name_es", models.CharField(max_length=100, null=True)), + ("description", models.TextField(blank=True)), + ("description_pt", models.TextField(blank=True, null=True)), + ("description_en", models.TextField(blank=True, null=True)), + ("description_es", models.TextField(blank=True, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "dataset", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="endpoint_categories", + to="v1.dataset", + ), + ), + ], + options={ + "verbose_name": "Endpoint Category", + "verbose_name_plural": "Endpoint Categories", + "ordering": ["created_at"], + }, + ), + migrations.AddField( + model_name="endpoint", + name="category", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="endpoints", + to="data_api.endpointcategory", + ), + ), + migrations.AddField( + model_name="endpoint", + name="table", + field=models.ForeignKey( + blank=True, + limit_choices_to=backend.apps.data_api.models.limit_table_choices, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="endpoints", + to="v1.table", + ), + ), + migrations.CreateModel( + name="Credit", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ( + "amount", + models.DecimalField( + decimal_places=2, + max_digits=12, + validators=[ + django.core.validators.MinValueValidator( + 0.01, message="Amount must be greater than zero" + ) + ], + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "currency", + models.ForeignKey( + blank=True, + limit_choices_to={"category__slug": "currency"}, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="credits", + to="v1.measurementunit", + ), + ), + ( + "key", + 
models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + related_name="credits", + to="data_api.key", + ), + ), + ], + options={ + "verbose_name": "Credit", + "verbose_name_plural": "Credits", + "ordering": ["created_at"], + }, + ), + migrations.AddConstraint( + model_name="endpointpricingtier", + constraint=models.CheckConstraint( + check=models.Q( + ("max_requests__gt", models.F("min_requests")), + ("max_requests__isnull", True), + _connector="OR", + ), + name="max_requests_greater_than_min", + ), + ), + ] diff --git a/backend/apps/data_api/migrations/__init__.py b/backend/apps/data_api/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/apps/data_api/models.py b/backend/apps/data_api/models.py new file mode 100644 index 00000000..ecd0428a --- /dev/null +++ b/backend/apps/data_api/models.py @@ -0,0 +1,296 @@ +# -*- coding: utf-8 -*- +from hashlib import sha256 +from uuid import uuid4 + +from django.core.validators import MinValueValidator +from django.db import models + +from backend.apps.account.models import Account +from backend.apps.api.v1.models import BigQueryType, Column, Dataset, MeasurementUnit, Table +from backend.custom.model import BaseModel + + +class Key(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + account = models.ForeignKey(Account, on_delete=models.CASCADE, related_name="keys") + name = models.CharField( + max_length=100, null=True, blank=True, help_text="A friendly name to identify this key" + ) + hash = models.CharField( + max_length=64, unique=True, null=True, blank=True, help_text="The hashed key" + ) + prefix = models.CharField( + max_length=8, + unique=True, + null=True, + blank=True, + help_text="First 8 characters of the key", + ) + is_active = models.BooleanField(default=True) + balance = models.DecimalField( + max_digits=12, + decimal_places=2, + default=0, + help_text="The balance of the key in BRL", + ) + expires_at = models.DateTimeField(null=True, blank=True, help_text="Optional expiration date") + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Key" + verbose_name_plural = "Keys" + ordering = ["created_at"] + + def __str__(self): + return f"{self.name} ({self.prefix}...)" + + @classmethod + def create_key(cls, **kwargs): + key = str(uuid4()) + obj = cls(**kwargs) + obj.prefix = key[:8] + obj.hash = sha256(key.encode()).hexdigest() + obj.save() + return obj, key + + +class EndpointCategory(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + slug = models.SlugField(max_length=100) + name = models.CharField(max_length=100) + description = models.TextField(blank=True) + dataset = models.ForeignKey( + Dataset, + on_delete=models.SET_NULL, + related_name="endpoint_categories", + null=True, + blank=True, + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Endpoint Category" + verbose_name_plural = "Endpoint Categories" + ordering = ["created_at"] + + def __str__(self): + return self.name + + +def limit_table_choices(*args): + if models.OuterRef("dataset") is not None: + return {"dataset": models.OuterRef("dataset")} + return {"id": None} + + +class Endpoint(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + slug = models.SlugField(max_length=100) + name = models.CharField(max_length=100) + description = models.TextField(blank=True) + table = models.ForeignKey( + Table, 
+ on_delete=models.SET_NULL, + related_name="endpoints", + null=True, + blank=True, + limit_choices_to=limit_table_choices, # only show tables in the endpoint category's dataset + ) + category = models.ForeignKey( + EndpointCategory, + on_delete=models.SET_NULL, + related_name="endpoints", + null=True, + blank=True, + ) + is_active = models.BooleanField(default=True) + is_deprecated = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Endpoint" + verbose_name_plural = "Endpoints" + ordering = ["created_at"] + + def __str__(self): + return self.name + + @property + def full_name(self): + return f"{self.category.name}.{self.name}" + + @property + def full_slug(self): + return f"{self.category.slug}.{self.slug}" + + @property + def parameters(self): + return self.parameters.all() + + def clean(self): + super().clean() + # TODO: Add validation for pricing tiers to ensure: + # 1. No overlapping tiers + # 2. No gaps between tiers + # 3. Only one unlimited tier + # Note: Consider implementing this at the form/admin level to allow individual tier edits + + def save(self, *args, **kwargs): + self.full_clean() + super().save(*args, **kwargs) + + def get_pricing_tier(self, request_count: int): + """ + Get the pricing tier for a given number of requests. + + Args: + request_count (int): Number of requests made in the current period + + Returns: + EndpointPricingTier: The pricing tier object that matches the request count, + or None if no matching tier is found + """ + return self.pricing_tiers.filter( + models.Q(min_requests__lte=request_count) + & (models.Q(max_requests__gte=request_count) | models.Q(max_requests__isnull=True)) + ).first() + + +def limit_column_choices(*args): + if models.OuterRef("table") is not None: + return {"table": models.OuterRef("table")} + return {"id": None} + + +class EndpointParameter(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + name = models.CharField(max_length=100) + description = models.TextField(blank=True) + type = models.ForeignKey( + BigQueryType, + on_delete=models.SET_NULL, + related_name="parameters", + null=True, + blank=True, + ) + is_required = models.BooleanField(default=False) + endpoint = models.ForeignKey( + Endpoint, + on_delete=models.SET_NULL, + related_name="parameters", + null=True, + blank=True, + ) + column = models.ForeignKey( + Column, + on_delete=models.SET_NULL, + related_name="parameters", + null=True, + blank=True, + limit_choices_to=limit_column_choices, # only show columns in the endpoint's table + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Endpoint Parameter" + verbose_name_plural = "Endpoint Parameters" + + def __str__(self): + return f"{self.endpoint.name} - {self.name}" + + +class Request(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + key = models.ForeignKey(Key, on_delete=models.DO_NOTHING, related_name="requests") + endpoint = models.ForeignKey(Endpoint, on_delete=models.DO_NOTHING, related_name="requests") + parameters = models.JSONField(default=dict) + error_message = models.TextField(blank=True) + response_time = models.FloatField(default=0) + bytes_processed = models.BigIntegerField(default=0) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Request" + verbose_name_plural 
= "Requests" + ordering = ["created_at"] + + def __str__(self): + return f"{self.key.name} - {self.endpoint.name}" + + +class Credit(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + key = models.ForeignKey(Key, on_delete=models.DO_NOTHING, related_name="credits") + amount = models.DecimalField( + max_digits=12, + decimal_places=2, + validators=[MinValueValidator(0.01, message="Amount must be greater than zero")], + ) + currency = models.ForeignKey( + MeasurementUnit, + on_delete=models.SET_NULL, + related_name="credits", + limit_choices_to={"category__slug": "currency"}, + null=True, + blank=True, + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Credit" + verbose_name_plural = "Credits" + ordering = ["created_at"] + + def __str__(self): + return f"{self.key.name} - {self.amount} {self.currency.name}" + + +class EndpointPricingTier(BaseModel): + id = models.UUIDField(primary_key=True, default=uuid4) + endpoint = models.ForeignKey(Endpoint, on_delete=models.CASCADE, related_name="pricing_tiers") + min_requests = models.PositiveIntegerField(help_text="Minimum number of requests for this tier") + max_requests = models.PositiveIntegerField( + help_text="Maximum number of requests for this tier", null=True, blank=True + ) + price_per_request = models.DecimalField( + max_digits=10, + decimal_places=4, + help_text="Price per request", + validators=[MinValueValidator(0)], + ) + currency = models.ForeignKey( + MeasurementUnit, + on_delete=models.SET_NULL, + related_name="endpoint_pricing_tiers", + limit_choices_to={"category__slug": "currency"}, + null=True, + blank=True, + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Endpoint Pricing Tier" + verbose_name_plural = "Endpoint Pricing Tiers" + ordering = ["min_requests"] + constraints = [ + models.CheckConstraint( + check=models.Q(max_requests__gt=models.F("min_requests")) + | models.Q(max_requests__isnull=True), + name="max_requests_greater_than_min", + ) + ] + + def __str__(self): + if self.max_requests: + return ( + f"{self.endpoint.name}: {self.min_requests}-{self.max_requests} " + f"requests @ R${self.price_per_request}" + ) + return f"{self.endpoint.name}: {self.min_requests}+ requests @ R${self.price_per_request}" diff --git a/backend/apps/data_api/translation.py b/backend/apps/data_api/translation.py new file mode 100644 index 00000000..9df33b58 --- /dev/null +++ b/backend/apps/data_api/translation.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +from modeltranslation.translator import TranslationOptions, translator + +from .models import Endpoint, EndpointCategory, EndpointParameter + + +class EndpointTranslationOptions(TranslationOptions): + fields = ("name", "description") + + +class EndpointParameterTranslationOptions(TranslationOptions): + fields = ("name", "description") + + +class EndpointCategoryTranslationOptions(TranslationOptions): + fields = ("name", "description") + + +translator.register(Endpoint, EndpointTranslationOptions) +translator.register(EndpointParameter, EndpointParameterTranslationOptions) +translator.register(EndpointCategory, EndpointCategoryTranslationOptions) diff --git a/backend/apps/data_api/urls.py b/backend/apps/data_api/urls.py new file mode 100644 index 00000000..be279702 --- /dev/null +++ b/backend/apps/data_api/urls.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +from django.urls import path + +from 
.views import ( + DataAPICreditAddView, + DataAPICreditDeductView, + DataAPICurrentTierView, + DataAPIEndpointValidateView, + DataAPIKeyValidateView, + DataAPIRequestRegisterView, +) + +urlpatterns = [ + path( + "data_api/keys/validate", + DataAPIKeyValidateView.as_view(), + name="validate_api_key", + ), + path( + "data_api/credits/add", + DataAPICreditAddView.as_view(), + name="add_credit", + ), + path( + "data_api/credits/deduct", + DataAPICreditDeductView.as_view(), + name="deduct_credit", + ), + path( + "data_api/endpoints/validate", + DataAPIEndpointValidateView.as_view(), + name="validate_endpoint", + ), + path( + "data_api/requests/current_tier", + DataAPICurrentTierView.as_view(), + name="current_tier", + ), + path( + "data_api/requests/register", + DataAPIRequestRegisterView.as_view(), + name="register_request", + ), +] diff --git a/backend/apps/data_api/views.py b/backend/apps/data_api/views.py new file mode 100644 index 00000000..90c4b8bc --- /dev/null +++ b/backend/apps/data_api/views.py @@ -0,0 +1,477 @@ +# -*- coding: utf-8 -*- +import json +from decimal import Decimal +from hashlib import sha256 + +from django.http import JsonResponse +from django.utils import timezone +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from backend.apps.api.v1.models import MeasurementUnit + +from .decorators import cloud_function_only, stripe_webhook_only +from .models import Credit, Endpoint, Key, Request + + +class DataAPIKeyValidateView(View): + def get(self, request): + key = request.GET.get("key") + if not key: + return JsonResponse({"error": "API key not provided", "success": False}, status=400) + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + key = Key.objects.get(hash=hashed_key) + + # Check if key is expired + is_expired = False + if key.expires_at and key.expires_at < timezone.now(): + is_expired = True + + return JsonResponse( + { + "success": True, + "resource": { + "isActive": key.is_active and not is_expired, + "createdAt": key.created_at, + "expiresAt": key.expires_at, + "balance": float(key.balance), + }, + } + ) + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + + +class DataAPICurrentTierView(View): + def get(self, request): + key = request.GET.get("key") + category_slug = request.GET.get("category") + endpoint_slug = request.GET.get("endpoint") + + if not all([key, category_slug, endpoint_slug]): + return JsonResponse( + {"error": "Missing required parameters", "success": False}, status=400 + ) + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + key = Key.objects.get(hash=hashed_key) + endpoint = Endpoint.objects.get(category__slug=category_slug, slug=endpoint_slug) + + # Get the first day of current month + today = timezone.now() + first_day = today.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + + # Count requests for current month + monthly_requests = Request.objects.filter( + key=key, endpoint=endpoint, created_at__gte=first_day + ).count() + + # Get current pricing tier + current_tier = endpoint.get_pricing_tier(monthly_requests) + + if not current_tier: + return JsonResponse( + {"error": "No pricing tier found for this request volume", "success": False}, + status=404, + ) + + return JsonResponse( + { + "success": True, + "resource": { + "monthly_requests": monthly_requests, + "current_tier": { + "min_requests": current_tier.min_requests, + 
"max_requests": current_tier.max_requests, + "price_per_request": float(current_tier.price_per_request), + }, + }, + } + ) + + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except Endpoint.DoesNotExist: + return JsonResponse({"error": "Endpoint not found", "success": False}, status=404) + + +@method_decorator(csrf_exempt, name="dispatch") +class DataAPICreditAddView(View): + # TODO: remove GET method when in production + def get(self, request): + key = request.GET.get("key") + amount = request.GET.get("amount") + currency = request.GET.get("currency") + + if not all([key, amount, currency]): + return JsonResponse( + {"error": "Missing required parameters", "success": False}, status=400 + ) + + # Validate currency is BRL + if currency != "BRL": + return JsonResponse( + {"error": "Only BRL currency is supported", "success": False}, status=400 + ) + + try: + amount = float(amount) + except ValueError: + return JsonResponse({"error": "Invalid amount format", "success": False}, status=400) + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + amount = Decimal(str(amount)) + key = Key.objects.get(hash=hashed_key) + currency_obj = MeasurementUnit.objects.get(slug=currency.lower()) + + # Create credit record + Credit.objects.create(key=key, amount=amount, currency=currency_obj) + + # Update API key balance + key.balance += amount + key.save() + + return JsonResponse({"success": True, "new_balance": float(key.balance)}) + + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except MeasurementUnit.DoesNotExist: + return JsonResponse({"error": "Currency not found", "success": False}, status=404) + + @stripe_webhook_only + def post(self, request): + event = request.stripe_event + + # Only process successful payment events + if event.type != "payment_intent.succeeded": + return JsonResponse({"success": True, "message": f"Ignored event type {event.type}"}) + + try: + payment_intent = event.data.object + metadata = payment_intent.metadata + + key = metadata.get("key") + amount = float(payment_intent.amount) / 100 # Convert from cents to BRL + currency = payment_intent.currency.upper() + + if not key: + raise ValueError("API key not found in payment metadata") + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + amount = Decimal(str(amount)) + key = Key.objects.get(hash=hashed_key) + currency_obj = MeasurementUnit.objects.get(slug=currency.lower()) + + # Create credit record + Credit.objects.create(key=key, amount=amount, currency=currency_obj) + + # Update API key balance + key.balance += amount + key.save() + + return JsonResponse({"success": True, "new_balance": float(key.balance)}) + + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except MeasurementUnit.DoesNotExist: + return JsonResponse({"error": "Currency not found", "success": False}, status=404) + + except Exception as e: + return JsonResponse({"error": str(e), "success": False}, status=400) + + +@method_decorator(csrf_exempt, name="dispatch") +class DataAPICreditDeductView(View): + # TODO: remove GET method when in production + def get(self, request): + key = request.GET.get("key") + amount = request.GET.get("amount") + currency = request.GET.get("currency") + + if not all([key, amount, currency]): + return JsonResponse( + {"error": "Missing required parameters", "success": False}, status=400 + ) + + # 
Validate currency is BRL + if currency != "BRL": + return JsonResponse( + {"error": "Only BRL currency is supported", "success": False}, status=400 + ) + + try: + amount = float(amount) + except ValueError: + return JsonResponse({"error": "Invalid amount format", "success": False}, status=400) + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + amount = Decimal(str(amount)) + key = Key.objects.get(hash=hashed_key) + currency = MeasurementUnit.objects.get(slug="brl") + + # Check if there's enough balance + if key.balance < amount: + return JsonResponse({"error": "Insufficient balance", "success": False}, status=400) + + # Update API key balance + key.balance -= amount + key.save() + + return JsonResponse({"success": True, "new_balance": float(key.balance)}) + + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except MeasurementUnit.DoesNotExist: + return JsonResponse({"error": "Currency not found", "success": False}, status=404) + + @cloud_function_only + def post(self, request): + event = request.stripe_event + + # Only process successful payment events + if event.type != "payment_intent.succeeded": + return JsonResponse({"success": True, "message": f"Ignored event type {event.type}"}) + + try: + payment_intent = event.data.object + metadata = payment_intent.metadata + + key = metadata.get("key") + amount = float(payment_intent.amount) / 100 # Convert from cents to currency units + currency = payment_intent.currency.upper() + + if not key: + raise ValueError("API key not found in payment metadata") + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + amount = Decimal(str(amount)) + key = Key.objects.get(hash=hashed_key) + currency_obj = MeasurementUnit.objects.get(slug=currency.lower()) + + # Create credit record + Credit.objects.create(key=key, amount=amount, currency=currency_obj) + + # Update API key balance + key.balance += amount + key.save() + + return JsonResponse({"success": True, "new_balance": float(key.balance)}) + + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except MeasurementUnit.DoesNotExist: + return JsonResponse({"error": "Currency not found", "success": False}, status=404) + + except Exception as e: + return JsonResponse({"error": str(e), "success": False}, status=400) + + +class DataAPIEndpointValidateView(View): + def get(self, request): + category_slug = request.GET.get("category") + endpoint_slug = request.GET.get("endpoint") + + if not all([category_slug, endpoint_slug]): + return JsonResponse( + {"error": "Both category and endpoint slugs are required", "success": False}, + status=400, + ) + + try: + endpoint = Endpoint.objects.get(category__slug=category_slug, slug=endpoint_slug) + + return JsonResponse( + { + "success": True, + "resource": { + "isActive": endpoint.is_active and not endpoint.is_deprecated, + "isDeprecated": endpoint.is_deprecated, + "createdAt": endpoint.created_at, + }, + } + ) + + except Endpoint.DoesNotExist: + return JsonResponse({"error": "Endpoint not found", "success": False}, status=404) + + +@method_decorator(csrf_exempt, name="dispatch") +class DataAPIRequestRegisterView(View): + # TODO: remove GET method when in production + def get(self, request): + key = request.GET.get("key") + category_slug = request.GET.get("category") + endpoint_slug = request.GET.get("endpoint") + parameters_str = request.GET.get("parameters", "") + error_message = 
request.GET.get("error_message", "") + response_time = request.GET.get("response_time", "0.0") # Changed default to "0.0" + bytes_processed = request.GET.get("bytes_processed", "0") + + if not all([key, category_slug, endpoint_slug]): + return JsonResponse( + {"error": "Missing required parameters", "success": False}, status=400 + ) + + # Parse parameters from x:2,y:tfwas format to dict + parameters = {} + if parameters_str: + try: + for param in parameters_str.split(","): + if ":" in param: + k, v = param.split(":", 1) + parameters[k.strip()] = v.strip() + except Exception: + return JsonResponse( + { + "error": ( + "Invalid parameters format. " "Use format: param1:value1,param2:value2" + ), + "success": False, + }, + status=400, + ) + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + # Get API key and endpoint first + key = Key.objects.get(hash=hashed_key) + endpoint = Endpoint.objects.get(category__slug=category_slug, slug=endpoint_slug) + + # Get required parameters for this endpoint + required_params = endpoint.parameters.filter(is_required=True).values_list( + "name", flat=True + ) + + # Check if all required parameters are present + missing_params = [param for param in required_params if param not in parameters] + if missing_params: + return JsonResponse( + { + "error": ( + f"Missing required endpoint parameters: " f"{', '.join(missing_params)}" + ), + "success": False, + }, + status=400, + ) + + # Convert numeric values after parameter validation + try: + response_time = float(response_time) + bytes_processed = int(bytes_processed) + except (ValueError, TypeError): + return JsonResponse( + { + "error": "Invalid numeric values for response_time or bytes_processed", + "success": False, + }, + status=400, + ) + + # Create request record + Request.objects.create( + key=key, + endpoint=endpoint, + parameters=parameters, + error_message=error_message, + response_time=response_time, + bytes_processed=bytes_processed, + ) + + return JsonResponse({"success": True}) + + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except Endpoint.DoesNotExist: + return JsonResponse({"error": "Endpoint not found", "success": False}, status=404) + + @cloud_function_only + def post(self, request): + key = request.POST.get("key") + category_slug = request.POST.get("category") + endpoint_slug = request.POST.get("endpoint") + parameters = request.POST.get("parameters", "{}") + error_message = request.POST.get("error_message", "") + response_time = request.POST.get("response_time", "0") + bytes_processed = request.POST.get("bytes_processed", "0") + + if not all([key, category_slug, endpoint_slug, parameters]): + return JsonResponse( + {"error": "Missing required parameters", "success": False}, status=400 + ) + + # Hash the API key + hashed_key = sha256(key.encode()).hexdigest() + + try: + # Convert parameters + parameters = json.loads(parameters) + response_time = float(response_time) + bytes_processed = int(bytes_processed) + + # Get API key and endpoint + key = Key.objects.get(hash=hashed_key) + endpoint = Endpoint.objects.get(category__slug=category_slug, slug=endpoint_slug) + + # Get required parameters for this endpoint + required_params = endpoint.parameters.filter(is_required=True).values_list( + "name", flat=True + ) + + # Check if all required parameters are present + missing_params = [param for param in required_params if param not in parameters] + if missing_params: + return JsonResponse( + { + "error": ( + 
f"Missing required endpoint parameters: " f"{', '.join(missing_params)}" + ), + "success": False, + }, + status=400, + ) + + # Create request record + Request.objects.create( + key=key, + endpoint=endpoint, + parameters=parameters, + error_message=error_message, + response_time=response_time, + bytes_processed=bytes_processed, + ) + + return JsonResponse({"success": True}) + + except json.JSONDecodeError: + return JsonResponse( + {"error": "Invalid parameters JSON format", "success": False}, status=400 + ) + except (ValueError, TypeError): + return JsonResponse({"error": "Invalid numeric values", "success": False}, status=400) + except Key.DoesNotExist: + return JsonResponse({"error": "API key not found", "success": False}, status=404) + except Endpoint.DoesNotExist: + return JsonResponse({"error": "Endpoint not found", "success": False}, status=404) diff --git a/backend/apps/schema.py b/backend/apps/schema.py index 971c4c30..dc399de1 100644 --- a/backend/apps/schema.py +++ b/backend/apps/schema.py @@ -6,7 +6,7 @@ from backend.custom.graphql_auto import build_schema schema = build_schema( - applications=["account", "v1"], + applications=["account", "v1", "data_api"], extra_queries=[ APIQuery, PaymentQuery, diff --git a/backend/settings/base.py b/backend/settings/base.py index 72e25fbf..7b69df91 100644 --- a/backend/settings/base.py +++ b/backend/settings/base.py @@ -65,6 +65,7 @@ "backend.apps.account_auth", "backend.apps.account_payment.apps.PaymentConfig", "backend.apps.api.v1", + "backend.apps.data_api", "backend.apps.core", ] diff --git a/backend/urls.py b/backend/urls.py index 9c86c1df..2cf3c793 100644 --- a/backend/urls.py +++ b/backend/urls.py @@ -26,5 +26,6 @@ path("", include("backend.apps.account.urls")), path("", include("backend.apps.account_auth.urls")), path("", include("backend.apps.account_payment.urls")), + path("", include("backend.apps.data_api.urls")), ] urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) From bdd3ba33a0d1651d93dffaad88787fe172c5da22 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 14:18:01 +1100 Subject: [PATCH 002/181] fix: search_views, list tables --- backend/apps/api/v1/admin.py | 1 - backend/apps/api/v1/search_views.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index 8700ca18..3c438613 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -719,7 +719,6 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "dataset", "get_publishers", "get_data_cleaners", - "is_data_api_endpoint", "created_at", "updated_at", ] diff --git a/backend/apps/api/v1/search_views.py b/backend/apps/api/v1/search_views.py index 56a80e1c..af991508 100644 --- a/backend/apps/api/v1/search_views.py +++ b/backend/apps/api/v1/search_views.py @@ -30,8 +30,8 @@ def search(self): # Start with all results sqs = self.searchqueryset.all() - # Filter out datasets that contain data API endpoints - sqs = sqs.exclude(contains_data_api_endpoint_tables=True) + # Filter out datasets that contain data API endpoint tables + sqs = sqs.exclude(contains_data_api_endpoint_tables="true") # Debug print to see all form data print( From 0fc037f0ef448f2e389d08a4781b45d7ccefe4b6 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 14:50:26 +1100 Subject: [PATCH 003/181] fix: allow any table and column in endpoints --- backend/apps/api/v1/admin.py | 2 -- backend/apps/data_api/models.py | 14 -------------- 2 files 
changed, 16 deletions(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index 3c438613..d21c4d5c 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -717,8 +717,6 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): list_display = [ "name", "dataset", - "get_publishers", - "get_data_cleaners", "created_at", "updated_at", ] diff --git a/backend/apps/data_api/models.py b/backend/apps/data_api/models.py index ecd0428a..71f07a6c 100644 --- a/backend/apps/data_api/models.py +++ b/backend/apps/data_api/models.py @@ -79,12 +79,6 @@ def __str__(self): return self.name -def limit_table_choices(*args): - if models.OuterRef("dataset") is not None: - return {"dataset": models.OuterRef("dataset")} - return {"id": None} - - class Endpoint(BaseModel): id = models.UUIDField(primary_key=True, default=uuid4) slug = models.SlugField(max_length=100) @@ -96,7 +90,6 @@ class Endpoint(BaseModel): related_name="endpoints", null=True, blank=True, - limit_choices_to=limit_table_choices, # only show tables in the endpoint category's dataset ) category = models.ForeignKey( EndpointCategory, @@ -159,12 +152,6 @@ def get_pricing_tier(self, request_count: int): ).first() -def limit_column_choices(*args): - if models.OuterRef("table") is not None: - return {"table": models.OuterRef("table")} - return {"id": None} - - class EndpointParameter(BaseModel): id = models.UUIDField(primary_key=True, default=uuid4) name = models.CharField(max_length=100) @@ -190,7 +177,6 @@ class EndpointParameter(BaseModel): related_name="parameters", null=True, blank=True, - limit_choices_to=limit_column_choices, # only show columns in the endpoint's table ) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) From ec6adf8cacd62518d64da1da3945609892fd3e49 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 15:10:42 +1100 Subject: [PATCH 004/181] chore: view return endpoint cloud table --- backend/apps/data_api/views.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/backend/apps/data_api/views.py b/backend/apps/data_api/views.py index 90c4b8bc..5252fa38 100644 --- a/backend/apps/data_api/views.py +++ b/backend/apps/data_api/views.py @@ -300,6 +300,12 @@ def get(self, request): try: endpoint = Endpoint.objects.get(category__slug=category_slug, slug=endpoint_slug) + # Get cloud table information if available + cloud_table = None + if endpoint.table and endpoint.table.cloud_tables.first(): + ct = endpoint.table.cloud_tables.first() + cloud_table = f"{ct.gcp_project_id}.{ct.gcp_dataset_id}.{ct.gcp_table_id}" + return JsonResponse( { "success": True, @@ -307,6 +313,7 @@ def get(self, request): "isActive": endpoint.is_active and not endpoint.is_deprecated, "isDeprecated": endpoint.is_deprecated, "createdAt": endpoint.created_at, + "cloudTable": cloud_table, }, } ) From 49598f25bca498ab44d4c7458e2dadeb2cf98b5b Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 15:36:12 +1100 Subject: [PATCH 005/181] chore: parameter to search for columns --- backend/apps/api/v1/admin.py | 8 +++++++- backend/apps/data_api/admin.py | 1 + 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index d21c4d5c..1a33c55b 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -829,12 +829,18 @@ class ColumnAdmin(TabbedTranslationAdmin): "spatial_coverage", "temporal_coverage", ] - search_fields = ["name", "table__name"] + 
search_fields = ["name", "table__name", "table__dataset__name"] inlines = [ CoverageInline, ColumnOriginalNameInline, ] + def get_search_results(self, request, queryset, search_term): + """Optimize the query by selecting related fields""" + queryset, use_distinct = super().get_search_results(request, queryset, search_term) + queryset = queryset.select_related("table", "table__dataset") + return queryset, use_distinct + class ColumnOriginalNameAdmin(TabbedTranslationAdmin): readonly_fields = [ diff --git a/backend/apps/data_api/admin.py b/backend/apps/data_api/admin.py index 421d7eee..72753917 100644 --- a/backend/apps/data_api/admin.py +++ b/backend/apps/data_api/admin.py @@ -169,6 +169,7 @@ class EndpointParameterAdmin(admin.ModelAdmin): list_filter = ("name", "endpoint", "is_required") search_fields = ("name", "description", "endpoint__name", "column__name") readonly_fields = ("id", "created_at", "updated_at") + autocomplete_fields = ["column"] class EndpointPricingTierAdmin(admin.ModelAdmin): From 3cc6056fd67cca4aedcee62b8dbe1accf68866af Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 16:04:12 +1100 Subject: [PATCH 006/181] chore: enlarge column search --- backend/apps/data_api/admin.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/backend/apps/data_api/admin.py b/backend/apps/data_api/admin.py index 72753917..d5c78fc9 100644 --- a/backend/apps/data_api/admin.py +++ b/backend/apps/data_api/admin.py @@ -167,7 +167,15 @@ class EndpointCategoryAdmin(admin.ModelAdmin): class EndpointParameterAdmin(admin.ModelAdmin): list_display = ("name", "description", "endpoint", "column") list_filter = ("name", "endpoint", "is_required") - search_fields = ("name", "description", "endpoint__name", "column__name") + search_fields = ( + "name", + "description", + "endpoint__name", + "column__name", + "column__table__name", + "column__table__dataset__slug", + "column__table__slug", + ) readonly_fields = ("id", "created_at", "updated_at") autocomplete_fields = ["column"] From 51f6df0640d1b44f242e30cc136e1b78920d2f69 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Sat, 22 Feb 2025 16:41:07 +1100 Subject: [PATCH 007/181] chore: return endpoint parameters --- backend/apps/data_api/views.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/backend/apps/data_api/views.py b/backend/apps/data_api/views.py index 5252fa38..f8d26d72 100644 --- a/backend/apps/data_api/views.py +++ b/backend/apps/data_api/views.py @@ -306,6 +306,17 @@ def get(self, request): ct = endpoint.table.cloud_tables.first() cloud_table = f"{ct.gcp_project_id}.{ct.gcp_dataset_id}.{ct.gcp_table_id}" + # Get parameters information + parameters = [] + for param in endpoint.parameters.all(): + parameters.append( + { + "name": param.name, + "type": param.type.name if param.type else None, + "isRequired": param.is_required, + } + ) + return JsonResponse( { "success": True, @@ -314,6 +325,7 @@ def get(self, request): "isDeprecated": endpoint.is_deprecated, "createdAt": endpoint.created_at, "cloudTable": cloud_table, + "parameters": parameters, }, } ) From 5c65bbb7240e2e3cbfa5b749b62b6ea8c495c5ca Mon Sep 17 00:00:00 2001 From: isabelmeister Date: Mon, 31 Mar 2025 11:19:56 -0300 Subject: [PATCH 008/181] fix(Email): debbug for email activation and email reset password --- backend/apps/account/views.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/backend/apps/account/views.py b/backend/apps/account/views.py index 21ebd018..3c1952c4 100644 --- 
+++ b/backend/apps/account/views.py
@@ -33,12 +33,14 @@ def post(self, request, uidb64):
         try:
             uid = force_str(urlsafe_base64_decode(uidb64))
             user = user_model.objects.get(id=uid)
+            logger.info(f'Send Activation Email - User: {user}')
         except (TypeError, ValueError, OverflowError, user_model.DoesNotExist) as e:
-            logger.error(e)
+            logger.error(f'Send Activation Email - Error: {e}')
             user = None
 
         if user:
             send_activation_email(user)
+            logger.info('Send Activation Email - Activation email sent')
             return JsonResponse({}, status=200)
         else:
             return JsonResponse({}, status=422)
@@ -84,11 +86,13 @@ def dispatch(self, request, uidb64):
         try:
             uid = force_str(urlsafe_base64_decode(uidb64))
             user = user_model.objects.get(id=uid)
+            logger.info(f"ResetPassword user: {user}")
         except (TypeError, ValueError, OverflowError, user_model.DoesNotExist) as e:
-            logger.error(e)
+            logger.error(f"ResetPassword - PasswordResetView error: {e}")
             user = None
 
         if user:
+            logger.info(f"ResetPassword - User found, sending password reset email to: {user}")
             to_email = user.email
             from_email = settings.EMAIL_HOST_USER
             subject = "Base dos Dados: Redefinição de Senha"
@@ -110,8 +114,12 @@ def dispatch(self, request, uidb64):
             msg.attach_alternative(content, "text/html")
             msg.send()
+            logger.info(f"Successfully sent password reset email to: {to_email}")
             return JsonResponse({}, status=200)
         else:
+            logger.error(
+                f"PasswordResetView - account/views.py: could not send password reset email, no user found for uid {uidb64}"
+            )
             return JsonResponse({}, status=422)

From ca947d0af3d290fcc60e0d3332db3903c1a8d2e6 Mon Sep 17 00:00:00 2001
From: Ricardo Dahis
Date: Fri, 4 Apr 2025 14:12:41 +1100
Subject: [PATCH 009/181] feat: initial commit chatbot

---
 backend/apps/chatbot/admin.py  | 41 ++++++++++++++++++++++++++++++++++
 backend/apps/chatbot/models.py | 25 +++++++++++++++++++++
 backend/apps/chatbot/urls.py   | 38 +++++++++++++++++++++++++++++++
 backend/apps/chatbot/views.py  | 32 ++++++++++++++++++++++++++
 4 files changed, 136 insertions(+)
 create mode 100644 backend/apps/chatbot/admin.py
 create mode 100644 backend/apps/chatbot/models.py
 create mode 100644 backend/apps/chatbot/urls.py
 create mode 100644 backend/apps/chatbot/views.py

diff --git a/backend/apps/chatbot/admin.py b/backend/apps/chatbot/admin.py
new file mode 100644
index 00000000..1c21715f
--- /dev/null
+++ b/backend/apps/chatbot/admin.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+from django.contrib import admin
+
+from .models import ChatInteraction, Feedback
+
+
+class ChatInteractionAdmin(admin.ModelAdmin):
+    list_display = [
+        "question",
+        "created_at",
+    ]
+    search_fields = [
+        "question",
+        "answer",
+    ]
+    readonly_fields = [
+        "created_at",
+    ]
+    ordering = ["-created_at"]
+
+
+class FeedbackAdmin(admin.ModelAdmin):
+    list_display = [
+        "chat_interaction",
+        "number",
+        "created_at",
+        "updated_at",
+    ]
+    search_fields = [
+        "comment",
+        "chat_interaction__question",
+    ]
+    readonly_fields = [
+        "created_at",
+        "updated_at",
+    ]
+    ordering = ["-created_at"]
+
+
+admin.site.register(ChatInteraction, ChatInteractionAdmin)
+admin.site.register(Feedback, FeedbackAdmin)
diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py
new file mode 100644
index 00000000..104864c4
--- /dev/null
+++ b/backend/apps/chatbot/models.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+import uuid
+
+from django.db import models
+
+from backend.apps.account.models import User
+
+
+class ChatInteraction(models.Model):
+    id = models.UUIDField(primary_key=True,
default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE) + question = models.TextField() + answer = models.TextField() + generated_queries = models.JSONField(null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + model_url = models.URLField(null=True, blank=True) + + +class Feedback(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + chat_interaction = models.ForeignKey(ChatInteraction, on_delete=models.CASCADE) + number = models.IntegerField(null=True, blank=True) + comment = models.TextField(null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py new file mode 100644 index 00000000..1fa130b9 --- /dev/null +++ b/backend/apps/chatbot/urls.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +from django.urls import path + +from .views import ( + ChatbotAskView, + ChatInteractionSaveView, + ClearAssistantMemoryView, + FeedbackSaveView, + FeedbackUpdateView, +) + +urlpatterns = [ + path( + "chatbot/ask", + ChatbotAskView.as_view(), + name="chatbot_ask", + ), + path( + "chatbot/interactions/save", + ChatInteractionSaveView.as_view(), + name="save_chat_interaction", + ), + path( + "chatbot/feedback/save", + FeedbackSaveView.as_view(), + name="save_feedback", + ), + path( + "chatbot/feedback/update", + FeedbackUpdateView.as_view(), + name="update_feedback", + ), + path( + "chatbot/memory/clear", + ClearAssistantMemoryView.as_view(), + name="clear_assistant_memory", + ), +] diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py new file mode 100644 index 00000000..f103c2f6 --- /dev/null +++ b/backend/apps/chatbot/views.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +from rest_framework.views import APIView + + +class ChatbotAskView(APIView): + def post(self, request): + # Implementation for handling questions + pass + + +class ChatInteractionSaveView(APIView): + def post(self, request): + # Implementation for saving chat interactions + pass + + +class FeedbackSaveView(APIView): + def post(self, request): + # Implementation for saving feedback + pass + + +class FeedbackUpdateView(APIView): + def put(self, request): + # Implementation for updating feedback + pass + + +class ClearAssistantMemoryView(APIView): + def post(self, request): + # Implementation for clearing chat history + pass From c5f06a354c017677324ca2d10e0a4a0d0773aa94 Mon Sep 17 00:00:00 2001 From: isabelmeister Date: Tue, 8 Apr 2025 14:15:37 -0300 Subject: [PATCH 010/181] fix: all changes to fix dev environment --- Dockerfile | 1 + .../0054_alter_organization_area.py | 24 + .../0055_alter_type_fields_many_tables.py | 543 ++++++++++++++++ .../management/commands/fetch_metabase.py | 48 +- .../apps/core/management/commands/populate.py | 601 +++++++++++------- backend/settings/local.py | 8 +- 6 files changed, 972 insertions(+), 253 deletions(-) create mode 100644 backend/apps/api/v1/migrations/0054_alter_organization_area.py create mode 100644 backend/apps/api/v1/migrations/0055_alter_type_fields_many_tables.py diff --git a/Dockerfile b/Dockerfile index 0f22234e..0bcb1b89 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,6 +17,7 @@ RUN apt-get update \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && rm /etc/nginx/sites-enabled/default +RUN apt-get update && apt-get install -y postgresql postgresql-contrib COPY nginx.conf /etc/nginx/nginx.conf # Prevents Python from writing 
.pyc files to disc diff --git a/backend/apps/api/v1/migrations/0054_alter_organization_area.py b/backend/apps/api/v1/migrations/0054_alter_organization_area.py new file mode 100644 index 00000000..1fb07dc0 --- /dev/null +++ b/backend/apps/api/v1/migrations/0054_alter_organization_area.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.19 on 2025-03-04 01:33 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("v1", "0053_rename_required_requires"), + ] + + operations = [ + migrations.AlterField( + model_name="organization", + name="area", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="organizations", + to="v1.area", + ), + ), + ] diff --git a/backend/apps/api/v1/migrations/0055_alter_type_fields_many_tables.py b/backend/apps/api/v1/migrations/0055_alter_type_fields_many_tables.py new file mode 100644 index 00000000..4bdf4bd3 --- /dev/null +++ b/backend/apps/api/v1/migrations/0055_alter_type_fields_many_tables.py @@ -0,0 +1,543 @@ +# Generated by Django 4.2.10 on 2025-03-01 03:05 + +from django.db import migrations + + +def alter_columns_of_area(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE area " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN entity_id TYPE UUID USING entity_id::uuid, " + "ALTER COLUMN parent_id TYPE UUID USING parent_id::uuid;" + ) + + +def alter_columns_of_availability(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE availability " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN name TYPE varchar(100), " + "ALTER COLUMN name_pt TYPE varchar(100), " + "ALTER COLUMN name_en TYPE varchar(100), " + "ALTER COLUMN name_es TYPE varchar(100);" + ) + + +def alter_columns_of_bigquery_type(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE bigquery_type " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN name TYPE varchar(50);" + ) + + +def alter_columns_of_cloud_table_columns(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE cloud_table_columns " + "ALTER COLUMN cloudtable_id TYPE UUID USING cloudtable_id::uuid, " + "ALTER COLUMN column_id TYPE UUID USING column_id::uuid;" + ) + + +def alter_columns_of_cloud_table(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE cloud_table " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN gcp_project_id TYPE varchar(100), " + "ALTER COLUMN gcp_dataset_id TYPE varchar(100), " + "ALTER COLUMN gcp_table_id TYPE varchar(100), " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid;" + ) + + +def alter_columns_of_column(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + """ALTER TABLE "column" """ + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN name_staging TYPE varchar(255), " + "ALTER COLUMN 
name_staging_en TYPE varchar(255), " + "ALTER COLUMN name_staging_es TYPE varchar(255), " + "ALTER COLUMN name_staging_pt TYPE varchar(255), " + "ALTER COLUMN measurement_unit TYPE varchar(100), " + "ALTER COLUMN description TYPE text, " + "ALTER COLUMN description_pt TYPE text, " + "ALTER COLUMN description_en TYPE text, " + "ALTER COLUMN description_es TYPE text, " + "ALTER COLUMN observations TYPE text, " + "ALTER COLUMN observations_pt TYPE text, " + "ALTER COLUMN observations_en TYPE text, " + "ALTER COLUMN observations_es TYPE text, " + "ALTER COLUMN bigquery_type_id TYPE UUID USING bigquery_type_id::uuid, " + "ALTER COLUMN directory_primary_key_id TYPE UUID USING directory_primary_key_id::uuid, " + "ALTER COLUMN observation_level_id TYPE UUID USING observation_level_id::uuid, " + "ALTER COLUMN status_id TYPE UUID USING status_id::uuid, " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid;" + ) + + +def alter_columns_of_coverage(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE coverage " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN area_id TYPE UUID USING area_id::uuid, " + "ALTER COLUMN column_id TYPE UUID USING column_id::uuid, " + "ALTER COLUMN information_request_id TYPE UUID USING information_request_id::uuid, " + "ALTER COLUMN key_id TYPE UUID USING key_id::uuid, " + "ALTER COLUMN raw_data_source_id TYPE UUID USING raw_data_source_id::uuid, " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid, " + "ALTER COLUMN analysis_id TYPE UUID USING analysis_id::uuid, " + "ALTER COLUMN column_original_name_id TYPE UUID USING column_original_name_id::uuid;" + ) + + +def alter_columns_of_dataset_organizations(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE dataset_organizations " + "ALTER COLUMN dataset_id TYPE UUID USING dataset_id::uuid, " + "ALTER COLUMN organization_id TYPE UUID USING organization_id::uuid;" + ) + + +def alter_columns_of_dataset_tags(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE dataset_tags " + "ALTER COLUMN dataset_id TYPE UUID USING dataset_id::uuid, " + "ALTER COLUMN tag_id TYPE UUID USING tag_id::uuid;" + ) + + +def alter_columns_of_dataset_themes(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE dataset_themes " + "ALTER COLUMN dataset_id TYPE UUID USING dataset_id::uuid, " + "ALTER COLUMN theme_id TYPE UUID USING theme_id::uuid;" + ) + + +def alter_columns_of_dataset(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE dataset " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN description TYPE text, " + "ALTER COLUMN description_pt TYPE text, " + "ALTER COLUMN description_en TYPE text, " + "ALTER COLUMN description_es TYPE text, " + "ALTER COLUMN status_id TYPE UUID USING status_id::uuid, " + "ALTER COLUMN usage_guide TYPE varchar(255);" + ) + + +def alter_columns_of_datetime_range_units(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE datetime_range_units " + "ALTER COLUMN datetimerange_id TYPE UUID USING datetimerange_id::uuid, " + "ALTER COLUMN column_id TYPE UUID 
USING column_id::uuid;" + ) + + +def alter_columns_of_datetime_range(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE datetime_range " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN coverage_id TYPE UUID USING coverage_id::uuid;" + ) + + +def alter_columns_of_entity_category(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE entity_category " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(100), " + "ALTER COLUMN name_pt TYPE varchar(100), " + "ALTER COLUMN name_en TYPE varchar(100), " + "ALTER COLUMN name_es TYPE varchar(100);" + ) + + +def alter_columns_of_entity(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE entity " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN category_id TYPE UUID USING category_id::uuid;" + ) + + +def alter_columns_of_information_request(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE information_request " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN origin TYPE text, " + "ALTER COLUMN number TYPE varchar(255), " + "ALTER COLUMN url TYPE text, " + "ALTER COLUMN data_url TYPE text, " + "ALTER COLUMN observations TYPE text, " + "ALTER COLUMN observations_pt TYPE text, " + "ALTER COLUMN observations_en TYPE text, " + "ALTER COLUMN observations_es TYPE text, " + "ALTER COLUMN dataset_id TYPE UUID USING dataset_id::uuid, " + "ALTER COLUMN started_by_id TYPE int4 USING started_by_id::integer , " + "ALTER COLUMN status_id TYPE UUID USING status_id::uuid;" + ) + + +def alter_columns_of_language(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE language " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(50), " + "ALTER COLUMN name TYPE varchar(100), " + "ALTER COLUMN name_pt TYPE varchar(100), " + "ALTER COLUMN name_en TYPE varchar(100), " + "ALTER COLUMN name_es TYPE varchar(100);" + ) + + +def alter_columns_of_license(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE license " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255);" + ) + + +def alter_columns_of_measurement_unit_category(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE measurement_unit_category " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(50), " + "ALTER COLUMN name TYPE varchar(50), " + "ALTER COLUMN name_pt TYPE varchar(50), " + "ALTER COLUMN name_en TYPE varchar(50), " + "ALTER COLUMN name_es TYPE varchar(50);" + ) + + +def alter_columns_of_measurement_unit(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE measurement_unit " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(50), " + "ALTER COLUMN name TYPE 
varchar(50), " + "ALTER COLUMN name_pt TYPE varchar(50), " + "ALTER COLUMN name_en TYPE varchar(50), " + "ALTER COLUMN name_es TYPE varchar(50), " + "ALTER COLUMN tex TYPE varchar(50), " + "ALTER COLUMN category_id TYPE UUID USING category_id::uuid;" + ) + + +def alter_columns_of_observation_level(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE observation_level " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN entity_id TYPE UUID USING entity_id::uuid, " + "ALTER COLUMN information_request_id TYPE UUID USING information_request_id::uuid, " + "ALTER COLUMN raw_data_source_id TYPE UUID USING raw_data_source_id::uuid, " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid;" + ) + + +def alter_columns_of_organization(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE organization " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(255), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN description TYPE text, " + "ALTER COLUMN description_pt TYPE text, " + "ALTER COLUMN description_en TYPE text, " + "ALTER COLUMN description_es TYPE text, " + "ALTER COLUMN website TYPE varchar(255), " + "ALTER COLUMN twitter TYPE varchar(255), " + "ALTER COLUMN facebook TYPE varchar(255), " + "ALTER COLUMN linkedin TYPE varchar(255), " + "ALTER COLUMN instagram TYPE varchar(255), " + "ALTER COLUMN picture TYPE varchar(100), " + "ALTER COLUMN area_id TYPE UUID USING area_id::uuid;" + ) + + +def alter_columns_of_pipeline(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE pipeline " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN github_url TYPE varchar(255);" + ) + + +def alter_columns_of_poll(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE poll " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN entity_id TYPE UUID USING entity_id::uuid, " + "ALTER COLUMN information_request_id TYPE UUID USING information_request_id::uuid, " + "ALTER COLUMN raw_data_source_id TYPE UUID USING raw_data_source_id::uuid;" + ) + + +def alter_columns_of_quality_check(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE quality_check " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN description TYPE text, " + "ALTER COLUMN description_pt TYPE text, " + "ALTER COLUMN description_en TYPE text, " + "ALTER COLUMN description_es TYPE text, " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid;" + ) + + +def alter_columns_of_raw_data_source_area_ip_address_required(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE raw_data_source_area_ip_address_required " + "ALTER COLUMN rawdatasource_id TYPE UUID USING rawdatasource_id::uuid, " + "ALTER COLUMN area_id TYPE UUID USING area_id::uuid;" + ) + + +def alter_columns_of_raw_data_source_languages(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE raw_data_source_languages " + "ALTER COLUMN 
rawdatasource_id TYPE UUID USING rawdatasource_id::uuid, " + "ALTER COLUMN language_id TYPE UUID USING language_id::uuid;" + ) + + +def alter_columns_of_raw_data_source(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE raw_data_source " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN description TYPE text, " + "ALTER COLUMN description_pt TYPE text, " + "ALTER COLUMN description_en TYPE text, " + "ALTER COLUMN description_es TYPE text, " + "ALTER COLUMN availability_id TYPE UUID USING availability_id::uuid, " + "ALTER COLUMN dataset_id TYPE UUID USING dataset_id::uuid, " + "ALTER COLUMN license_id TYPE UUID USING license_id::uuid, " + "ALTER COLUMN status_id TYPE UUID USING status_id::uuid;" + ) + + +def alter_columns_of_status(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE status " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255);" + ) + + +def alter_columns_of_table_data_cleaned_by(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE table_data_cleaned_by " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid, " + "ALTER COLUMN account_id TYPE bigint USING account_id::bigint;" + ) + + +def alter_columns_of_table_neighbor(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE table_neighbor " + "ALTER COLUMN id TYPE int4 USING id::integer, " + "ALTER COLUMN table_a_id TYPE UUID USING table_a_id::uuid, " + "ALTER COLUMN table_b_id TYPE UUID USING table_b_id::uuid;" + ) + + +def alter_columns_of_table_published_by(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE table_published_by " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid, " + "ALTER COLUMN account_id TYPE bigint USING account_id::bigint;" + ) + + +def alter_columns_of_table_raw_data_source(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE table_raw_data_source " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid, " + "ALTER COLUMN rawdatasource_id TYPE UUID USING rawdatasource_id::uuid;" + ) + + +def alter_columns_of_table(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + """ALTER TABLE "table" """ + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(255), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255), " + "ALTER COLUMN description TYPE text, " + "ALTER COLUMN description_pt TYPE text, " + "ALTER COLUMN description_en TYPE text, " + "ALTER COLUMN description_es TYPE text, " + "ALTER COLUMN data_cleaning_description TYPE text, " + "ALTER COLUMN source_bucket_name TYPE varchar(255), " + "ALTER COLUMN uncompressed_file_size TYPE bigint USING uncompressed_file_size::bigint, " + "ALTER COLUMN compressed_file_size TYPE bigint USING compressed_file_size::bigint, " + "ALTER COLUMN number_rows 
TYPE bigint USING number_rows::bigint, " + "ALTER COLUMN number_columns TYPE int4 USING number_columns::int4, " + "ALTER COLUMN dataset_id TYPE UUID USING dataset_id::uuid, " + "ALTER COLUMN license_id TYPE UUID USING license_id::uuid, " + "ALTER COLUMN partner_organization_id TYPE UUID USING partner_organization_id::uuid, " + "ALTER COLUMN pipeline_id TYPE UUID USING pipeline_id::uuid, " + "ALTER COLUMN status_id TYPE UUID USING status_id::uuid;" + ) + + +def alter_columns_of_tag(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE tag " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255);" + ) + + +def alter_columns_of_theme(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE theme " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN slug TYPE varchar(100), " + "ALTER COLUMN name TYPE varchar(255), " + "ALTER COLUMN name_pt TYPE varchar(255), " + "ALTER COLUMN name_en TYPE varchar(255), " + "ALTER COLUMN name_es TYPE varchar(255);" + ) + + +def alter_columns_of_update(apps, schema_editor): + with schema_editor.connection.cursor() as cursor: + cursor.execute( + "ALTER TABLE update " + "ALTER COLUMN id TYPE UUID USING id::uuid, " + "ALTER COLUMN entity_id TYPE UUID USING entity_id::uuid, " + "ALTER COLUMN information_request_id TYPE UUID USING information_request_id::uuid, " + "ALTER COLUMN raw_data_source_id TYPE UUID USING raw_data_source_id::uuid, " + "ALTER COLUMN table_id TYPE UUID USING table_id::uuid;" + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("v1", "0054_alter_organization_area"), + ] + + operations = [ + migrations.RunPython(alter_columns_of_area), + migrations.RunPython(alter_columns_of_availability), + migrations.RunPython(alter_columns_of_bigquery_type), + migrations.RunPython(alter_columns_of_cloud_table_columns), + migrations.RunPython(alter_columns_of_cloud_table), + migrations.RunPython(alter_columns_of_coverage), + migrations.RunPython(alter_columns_of_dataset_organizations), + migrations.RunPython(alter_columns_of_dataset_tags), + migrations.RunPython(alter_columns_of_dataset_themes), + migrations.RunPython(alter_columns_of_dataset), + migrations.RunPython(alter_columns_of_datetime_range_units), + migrations.RunPython(alter_columns_of_datetime_range), + migrations.RunPython(alter_columns_of_entity_category), + migrations.RunPython(alter_columns_of_entity), + migrations.RunPython(alter_columns_of_information_request), + migrations.RunPython(alter_columns_of_language), + migrations.RunPython(alter_columns_of_license), + migrations.RunPython(alter_columns_of_measurement_unit_category), + migrations.RunPython(alter_columns_of_measurement_unit), + migrations.RunPython(alter_columns_of_observation_level), + migrations.RunPython(alter_columns_of_organization), + migrations.RunPython(alter_columns_of_pipeline), + migrations.RunPython(alter_columns_of_poll), + migrations.RunPython(alter_columns_of_quality_check), + migrations.RunPython(alter_columns_of_raw_data_source_area_ip_address_required), + migrations.RunPython(alter_columns_of_raw_data_source_languages), + migrations.RunPython(alter_columns_of_raw_data_source), + migrations.RunPython(alter_columns_of_status), + 
migrations.RunPython(alter_columns_of_table_data_cleaned_by), + migrations.RunPython(alter_columns_of_table_neighbor), + migrations.RunPython(alter_columns_of_table_published_by), + migrations.RunPython(alter_columns_of_table_raw_data_source), + migrations.RunPython(alter_columns_of_tag), + migrations.RunPython(alter_columns_of_theme), + migrations.RunPython(alter_columns_of_update), + migrations.RunPython(alter_columns_of_table), + migrations.RunPython(alter_columns_of_column), + ] diff --git a/backend/apps/core/management/commands/fetch_metabase.py b/backend/apps/core/management/commands/fetch_metabase.py index 930def1c..7e379e16 100644 --- a/backend/apps/core/management/commands/fetch_metabase.py +++ b/backend/apps/core/management/commands/fetch_metabase.py @@ -62,9 +62,7 @@ def get_databases(self, token: str): def get_tables(self, token: str, database_id: int): headers = self.get_headers(token) - response = requests.get( - BASE_URL + f"/api/database/{database_id}/metadata", headers=headers - ) + response = requests.get(BASE_URL + f"/api/database/{database_id}/metadata", headers=headers) json_data = response.json() tables = [] @@ -78,7 +76,7 @@ def get_tables(self, token: str, database_id: int): return tables - def def_get_data_paginated(self, headers, database_id, query, page=0): + def get_data_paginated(self, headers, database_id, query, page=0): limit = 2000 new_query = query + f" LIMIT {limit} OFFSET {page * limit}" @@ -104,29 +102,26 @@ def get_table_data(self, token: str, database_id: int, table: Table): fields = [f'"{field}"' for field in table.fields] formated_field = ", ".join(fields) query = f'SELECT {formated_field} FROM "{table.name}"' - - raw_rows = [] + all_rows = [] page = 0 + while True: - data = self.def_get_data_paginated(headers, database_id, query, page) + data = self.get_data_paginated(headers, database_id, query, page) if len(data) == 0: break - raw_rows += data - page += 1 - - self.stdout.write(self.style.SUCCESS(f"Fetched {len(raw_rows)} rows from {str(table)}")) + for row in data: + instance = {} + for i, field in enumerate(table.fields): + instance[field] = row[i] + all_rows.append(instance) - rows = [] - for row in raw_rows: - instance = {} - for i, field in enumerate(table.fields): - instance[field] = row[i] - - rows.append(instance) + page += 1 + self.stdout.write(self.style.SUCCESS(f"Paginated {len(data)} rows from {str(table)}")) - if len(rows) > 0: - self.save_data(table.name, json.dumps(rows, ensure_ascii=False, indent=4)) + # Salva todos os dados de uma vez + if len(all_rows) > 0: + self.save_data(table.name, all_rows) else: self.stdout.write(self.style.WARNING(f"No data found for {str(table)}")) @@ -146,8 +141,19 @@ def save_data(self, table_name, data): file_path = os.path.join(directory, f"{table_name}.json") + if os.path.exists(file_path): + with open(file_path, "r", encoding="utf-8") as file: + try: + existing_data = json.load(file) + except json.JSONDecodeError: + existing_data = [] + else: + existing_data = [] + + existing_data.extend(data) + with open(file_path, "w", encoding="utf-8") as file: - file.write(data) + json.dump(existing_data, file, ensure_ascii=False, indent=4) def handle(self, *args, **kwargs): token = self.authenticate() diff --git a/backend/apps/core/management/commands/populate.py b/backend/apps/core/management/commands/populate.py index 807f96b6..7310fca4 100644 --- a/backend/apps/core/management/commands/populate.py +++ b/backend/apps/core/management/commands/populate.py @@ -4,7 +4,7 @@ from django.apps import apps from 
django.core.management.base import BaseCommand -from django.db import models, transaction +from django.db import connection, models, transaction from tqdm import tqdm @@ -29,32 +29,12 @@ def bulk_update(self): model = instances[0].__class__ field_name = namespace.split(".")[1] - # Bulk update in chunks of 2000 instances - for i in range(0, len(instances), 2000): - chunk = instances[i : i + 2000] + # Bulk update in chunks of 1000 instances + for i in range(0, len(instances), 1000): + chunk = instances[i : i + 1000] model.objects.bulk_update(chunk, [field_name]) -class References: - """ - Store references between legacy and new ids - """ - - tables = {} - - def add(self, table, legacy_id, new_id): - if table not in self.tables: - self.tables[table] = {} - - self.tables[table][legacy_id] = new_id - - def get(self, table, legacy_id): - if table not in self.tables: - return None - - return self.tables[table].get(legacy_id) - - class Layer: """ Store models in a layer @@ -83,40 +63,174 @@ def print(self, context): class Command(BaseCommand): help = "Populate database with initial data" - def get_all_files(self): - directory = os.path.join(os.getcwd(), "metabase_data") - files = os.listdir(directory) - self.files = files - - def load_table_data(self, table_name): - directory = os.path.join(os.getcwd(), "metabase_data") - with open(f"{directory}/{table_name}.json") as f: - data = json.load(f) - - return data + def enable_not_null_if_exists(self, table_name, column_name): + """ + Verifica se a coluna deve ter uma restrição NOT NULL e, se necessário, reabilita-a. + A restrição NOT NULL só será reativada se não houver valores nulos na coluna. + """ + with connection.cursor() as cursor: + cursor.execute("SET session_replication_role = 'origin';") + + # Verifica se a coluna deve ter a restrição NOT NULL + cursor.execute( + f""" + SELECT is_nullable + FROM information_schema.columns + WHERE table_name = '{table_name}' + AND column_name = '{column_name}'; + """ + ) + result = cursor.fetchone() + + if result and result[0] == "YES": # 'YES' significa que a coluna permite NULL + # Verifica se há valores nulos na coluna + cursor.execute( + f""" + SELECT COUNT(*) + FROM "{table_name}" + WHERE "{column_name}" IS NULL; + """ + ) + null_count = cursor.fetchone()[0] + + if null_count == 0: + # Reabilita a restrição NOT NULL, pois não há valores nulos + cursor.execute( + f"""ALTER TABLE "{table_name}" ALTER COLUMN "{column_name}" SET NOT NULL;""" + ) + self.stdout.write( + self.style.SUCCESS( + f"Restrição NOT NULL reabilitada para a coluna {column_name} na tabela {table_name}." + ) + ) + else: + self.stdout.write( + self.style.WARNING( + f"A coluna {column_name} na tabela {table_name} possui {null_count} valores nulos. " + f"A restrição NOT NULL não foi reativada." + ) + ) + else: + self.stdout.write( + self.style.WARNING( + f"A coluna {column_name} na tabela {table_name} já possui restrição NOT NULL ou não existe." + ) + ) - def get_m2m_data(self, table_name, current_table_name, field_name, id): - cache_context = f"m2m_cache_{table_name}" + def disable_not_null_if_exists(self, table_name, column_name): + """ + Desabilita a restrição NOT NULL para uma coluna, exceto se o nome da coluna for 'ID'. + """ + # Verifica se o nome da coluna é 'ID' (ignora maiúsculas/minúsculas) + if column_name.lower() == "id": + self.stdout.write( + self.style.WARNING( + f"A coluna {column_name} na tabela {table_name} é 'ID'. A restrição NOT NULL será mantida." 
+ ) + ) + return # Não faz nada para colunas com o nome 'ID' + + with connection.cursor() as cursor: + cursor.execute("SET session_replication_role = 'replica';") + # Verifica se a coluna possui a restrição NOT NULL + cursor.execute( + f""" + SELECT is_nullable + FROM information_schema.columns + WHERE table_name = '{table_name}' + AND column_name = '{column_name}'; + """ + ) + result = cursor.fetchone() + + if ( + result and result[0] == "NO" and column_name.lower() != "id" + ): # 'NO' significa que a coluna é NOT NULL + # Desabilita a restrição NOT NULL + cursor.execute( + f"""ALTER TABLE "{table_name}" ALTER COLUMN "{column_name}" DROP NOT NULL;""" + ) + self.stdout.write( + self.style.SUCCESS( + f"Restrição NOT NULL desabilitada para a coluna {column_name} na tabela {table_name}." + ) + ) + else: + self.stdout.write( + self.style.WARNING( + f"A coluna {column_name} na tabela {table_name} não possui restrição NOT NULL ou não existe." + ) + ) - if not hasattr(self, cache_context): - data = self.load_table_data(table_name) - cache = {} + def disable_constraints(self, items): + """ + Desabilita constraints NOT NULL para uma lista de modelos ou nomes de tabelas + """ + for item in items: + if isinstance(item, str): # É um nome de tabela (sem modelo) + # Para tabelas sem modelo, precisamos obter as colunas NOT NULL do banco de dados + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT column_name + FROM information_schema.columns + WHERE table_name = %s + AND is_nullable = 'NO' + AND column_name != 'id' + """, + [item], + ) + not_null_columns = [row[0] for row in cursor.fetchall()] - for item in data: - related_id = item[current_table_name] - if related_id not in cache: - cache[related_id] = [] + for column in not_null_columns: + self.disable_not_null_if_exists(item, column) + else: # É um modelo Django + table_name = item._meta.db_table + for field in item._meta.get_fields(): + if isinstance(field, models.Field) and field.null is False: + self.disable_not_null_if_exists(table_name, field.column) - cache[related_id].append(item[field_name]) + def enable_constraints(self, items): + """ + Habilita constraints NOT NULL para uma lista de modelos ou nomes de tabelas + """ + for item in items: + if isinstance(item, str): # É um nome de tabela (sem modelo) + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT column_name + FROM information_schema.columns + WHERE table_name = %s + AND is_nullable = 'YES' + """, + [item], + ) + nullable_columns = [row[0] for row in cursor.fetchall()] - setattr(self, cache_context, cache) + for column in nullable_columns: + self.enable_not_null_if_exists(item, column) + else: # É um modelo Django + table_name = item._meta.db_table + for field in item._meta.get_fields(): + if isinstance(field, models.Field) and field.null is False: + self.enable_not_null_if_exists(table_name, field.column) - return getattr(self, cache_context).get(id, []) + def get_all_files(self): + directory = os.path.join(os.getcwd(), "metabase_data") + files = [ + f for f in os.listdir(directory) if f.endswith(".json") + ] # Filtra apenas arquivos JSON + self.files = files - def model_has_data(self, model_name): - if f"{model_name}.json" in self.files: - return True - return False + def load_table_data(self, table_name): + directory = os.path.join(os.getcwd(), "metabase_data") + for file_name in self.files: + if file_name.lower() == f"{table_name.lower()}.json": + with open(f"{directory}/{file_name}", encoding="utf-8") as f: + data = json.load(f) + return data + 
return [] def get_models_without_foreign_keys(self, models_to_populate): models_without_foreign_keys = [] @@ -154,19 +268,12 @@ def get_models_that_depends_on(self, models_to_populate, layer_models): def sort_models_by_depedencies(self, models_to_populate, other_models): sorted_models = [] - # while len(models_to_populate) > 0: - for vezes in range(len(models_to_populate)): + # while range(len(models_to_populate)) > 0: + for _ in range(len(models_to_populate)): for model in models_to_populate: has_all_dependencies = True - for model in models_to_populate: for field in model._meta.get_fields(): - has_all_dependencies = True - - print( - f"Campo: {field}\nModelos a testar: {len(models_to_populate)}\n{'#' *30}" - ) - if isinstance(field, models.ForeignKey) or isinstance( field, models.ManyToManyField ): @@ -177,203 +284,237 @@ def sort_models_by_depedencies(self, models_to_populate, other_models): and field.null is False ): has_all_dependencies = False + break if has_all_dependencies: sorted_models.append(model) models_to_populate.remove(model) - sorted_models = sorted_models + models_to_populate - print(f"SORTED MODELS: {sorted_models}\n\n") - print(f"MODELS TO POPULATE: {models_to_populate}\n\n") - return sorted_models - def clean_database(self, _models): + def clean_database(self, items): """ - Clean database + Clean database for both Django models and raw tables without models """ - for model in tqdm(_models, desc="Set foreign keys to null"): - foreign_keys = [ - field - for field in model._meta.get_fields() - if isinstance(field, models.ForeignKey) and field.null is True - ] - - if foreign_keys: - field_names = [field.name for field in foreign_keys] - model.objects.update(**{field_name: None for field_name in field_names}) - - for model in tqdm(_models, desc="Cleaning database"): - with transaction.atomic(): - model.objects.all().delete() - - def create_instance(self, model, item): - payload = {} - retry = None - table_name = model._meta.db_table - m2m_payload = {} - - for field in model._meta.get_fields(): + # First pass: Set nullable foreign keys to null + for item in tqdm(items, desc="Setting nullable FKs to null"): + if not isinstance(item, str): # It's a Django model + foreign_keys = [ + field + for field in item._meta.get_fields() + if isinstance(field, models.ForeignKey) and field.null is True + ] + + if foreign_keys: + field_names = [field.name for field in foreign_keys] + item.objects.update(**{field_name: None for field_name in field_names}) + + # Second pass: Delete all data + for item in tqdm(items, desc="Cleaning database"): try: - if isinstance(field, models.ForeignKey): - field_name = f"{field.name}_id" - current_value = item.get(field_name) - - if current_value is None: - continue - - reference = self.references.get( - field.related_model._meta.db_table, current_value - ) - - if reference: - payload[field_name] = reference - else: - # If the field is required and the reference is missing, we need to skip - if field.null is False: - return - - retry = { - "item": item, - "table_name": field.related_model._meta.db_table, - "field_name": field_name, - } - elif isinstance(field, models.ManyToManyField): - field_name = field.name - m2m_table_name = field.m2m_db_table() - - current_model_name = f"{model.__name__.lower()}_id" - field_model_name = field.related_model.__name__.lower() + "_id" - - m2m_related_data = self.get_m2m_data( - m2m_table_name, current_model_name, field_model_name, item["id"] - ) - - instances = [ - self.references.get(field.related_model._meta.db_table, 
current_value)
-                        for current_value in m2m_related_data
-                    ]
-
-                    if instances:
-                        m2m_payload[field_name] = instances
-                else:
-                    current_value = item.get(field.name)
-
-                    if current_value is None:
-                        continue
-
-                    payload[field.name] = current_value
-            except:
-                breakpoint()
-                pass
-
-        instance = model(**payload)
-        instance.save()
-
-        # Set many to many relationships
-        if m2m_payload:
-            for field_name, related_data in m2m_payload.items():
-                field = getattr(instance, field_name)
+    def create_instance(self, model, item, bulk, table_name=None):
+        """
+        Create an instance in the database using cursor.execute and INSERT INTO.
+        :param model: The Django model that represents the table.
+        :param item: A dictionary with the data to be inserted.
+        :param bulk: BulkUpdate object that collects instances that still need to be updated.
+        :param table_name: Table name (used when there is no model).
+        """
+        if model:
+            table_name = f'"{model._meta.db_table}"'  # Table name taken from the model
+        elif table_name:
+            table_name = f'"{table_name}"'  # Table name provided directly
+        else:
+            raise ValueError("Either model or table_name must be provided.")
+
+        fields = []  # List of columns
+        values = []  # List of values
+        placeholders = []  # Placeholders for the INSERT (e.g. %s, %s, ...)
+ + # Itera sobre os campos do modelo + for field_name, field_value in item.items(): + if field_value is not None: # Ignora valores nulos + # Verifica se o campo é uma ForeignKey ou ManyToMany (apenas se houver um modelo) + if model: + try: + field = model._meta.get_field(field_name) + if isinstance(field, models.ForeignKey) or isinstance( + field, models.ManyToManyField + ): + if not field_name.endswith("_id"): + field_name = f"{field_name}_id" # Adiciona o sufixo '_id' para ForeignKey ou ManyToMany + except Exception: + pass # Ignora campos que não existem no modelo + + fields.append(f'"{field_name}"') # Adiciona o nome do campo + values.append(field_value) + placeholders.append("%s") + + # Constrói a query INSERT INTO + if fields: # Verifica se há campos para inserir + fields_str = ", ".join(fields) # Colunas (ex: "field1, field2, ...") + placeholders_str = ", ".join(placeholders) # Placeholders (ex: "%s, %s, ...") + query = f"""INSERT INTO {table_name} ({fields_str}) VALUES ({placeholders_str}) RETURNING id;""" + + # Executa a query usando cursor.execute + with connection.cursor() as cursor: try: - field.set(related_data) + cursor.execute(query, values) + inserted_id = cursor.fetchone()[0] # Obtém o ID da instância inserida + self.stdout.write( + self.style.SUCCESS(f"Inserted into {table_name} with Values {values}") + ) + if model: + instance = model.objects.get( + pk=inserted_id + ) # Recupera a instância inserida + # Adiciona a instância ao BulkUpdate se houver campos para atualizar + for field_name in fields: + if field_name in item and item[field_name]: + bulk.add(instance, field_name) except Exception as e: - print(e) - print(field_name) - print(related_data) - raise e - - if retry: - retry["instance"] = instance - self.retry_instances.append(retry) - - self.references.add(table_name, item["id"], instance.id) + self.stdout.write(self.style.ERROR(f"Erro ao inserir em {table_name}: {e}")) + else: + self.stdout.write(self.style.WARNING(f"No valid fields to insert for {table_name}")) def handle(self, *args, **kwargs): app_name = "v1" app = apps.get_app_config(app_name) self.get_all_files() - models_to_populate = [] + get_models = app.get_models() - for model in app.get_models(): - table_name = model._meta.db_table + # Lista de tabelas a partir dos nomes dos arquivos JSON + tables_from_files = [file_name.replace(".json", "") for file_name in self.files] - if self.model_has_data(table_name): + # Mapeia os modelos correspondentes às tabelas + models_to_populate = [] + for model in get_models: + if model._meta.db_table in tables_from_files: models_to_populate.append(model) - else: - self.stdout.write(self.style.WARNING(f"No data for {table_name}")) - - self.stdout.write(self.style.SUCCESS(f"Will populate {len(models_to_populate)} models")) - - leaf_layer = Layer() - leaf_layer.models = self.get_models_without_foreign_keys(models_to_populate) - # Remove leaf layer models from models_to_populate - models_to_populate = list(set(models_to_populate) - set(leaf_layer.models)) - leaf_layer.print(self) + # Remove as tabelas que já têm modelos da lista de tabelas sem modelos + tables_with_models = [model._meta.db_table for model in models_to_populate] + tables_without_models = [ + table for table in tables_from_files if table not in tables_with_models + ] - # Create a layer with models that only depend on the leaf layer - leaf_dependent_layer = Layer() - leaf_dependent_layer.depth = 2 - leaf_dependent_layer.models = self.get_models_that_depends_on( - models_to_populate, leaf_layer.models + 
self.stdout.write( + self.style.SUCCESS( + f"Will populate {len(models_to_populate)} models and {len(tables_without_models)} tables without models." + ) ) - # Remove leaf dependent layer models from models_to_populate - models_to_populate = list(set(models_to_populate) - set(leaf_dependent_layer.models)) - leaf_dependent_layer.print(self) - - # Sort populated models by dependencies - sorted_layer = Layer() - sorted_layer.depth = 3 - sorted_layer.models = self.sort_models_by_depedencies( - models_to_populate, leaf_layer.models + leaf_dependent_layer.models - ) - - sorted_layer.print(self) - models_to_populate = list(set(models_to_populate) - set(sorted_layer.models)) - - # Populate models - all_models = leaf_layer.models + leaf_dependent_layer.models + sorted_layer.models - - # Clean database - # make a copy, dont modify the original array - reversed_models = all_models.copy()[::-1] - self.stdout.write(self.style.WARNING("Cleaning database")) - self.clean_database(reversed_models) - self.stdout.write(self.style.SUCCESS("Database cleaned")) - - self.references = References() - # After populating all models, we need to retry the instances that had a missing references - self.retry_instances = [] - self.stdout.write(self.style.SUCCESS("Populating models")) - - for model in all_models: - table_name = model._meta.db_table - data = self.load_table_data(table_name) - self.stdout.write(self.style.SUCCESS(f"Populating {table_name}")) - - for item in tqdm(data, desc=f"Populating {table_name}"): - self.create_instance(model, item) - - self.stdout.write(self.style.SUCCESS("Populating instances with missing references")) - - bulk = BulkUpdate() - - for retry in tqdm(self.retry_instances, desc="Retrying instances"): - item = retry["item"] - instance = retry["instance"] - field_name = retry["field_name"] - related_table_name = retry["table_name"] - current_value = item.get(field_name) - - reference = self.references.get(related_table_name, current_value) + # Popula os modelos correspondentes + if models_to_populate: + leaf_layer = Layer() + leaf_layer.models = self.get_models_without_foreign_keys(models_to_populate) + + models_to_populate = list(set(models_to_populate) - set(leaf_layer.models)) + leaf_layer.print(self) + + leaf_dependent_layer = Layer() + leaf_dependent_layer.depth = 2 + leaf_dependent_layer.models = self.get_models_that_depends_on( + models_to_populate, leaf_layer.models + ) + + models_to_populate = list(set(models_to_populate) - set(leaf_dependent_layer.models)) + leaf_dependent_layer.print(self) + + sorted_layer = Layer() + sorted_layer.depth = 3 + sorted_layer.models = self.sort_models_by_depedencies( + models_to_populate, leaf_layer.models + leaf_dependent_layer.models + ) + sorted_layer.print(self) + models_to_populate = list(set(models_to_populate) - set(sorted_layer.models)) + + all_models = ( + leaf_layer.models + + leaf_dependent_layer.models + + sorted_layer.models + + models_to_populate + ) + + # Limpa o banco de dados + reversed_models = all_models.copy()[::-1] + self.stdout.write(self.style.WARNING("Cleaning database")) + self.clean_database(reversed_models + tables_without_models) + self.stdout.write(self.style.SUCCESS("Database cleaned")) + + bulk = BulkUpdate() + + # Desabilita constraints para todos os modelos ANTES de inserir dados + self.disable_constraints(all_models) + + for model in all_models: + table_name = model._meta.db_table + data = self.load_table_data(table_name) + if not data: + self.stdout.write(self.style.WARNING(f"No data found for {table_name}")) + 
continue + + self.stdout.write(self.style.SUCCESS(f"Populating {table_name}")) + + for item in tqdm(data, desc=f"Creating instance of {table_name}"): + try: + self.create_instance(model, item, bulk) + except Exception as error: + self.stdout.write( + self.style.ERROR(f"Erro ao criar instância de {table_name}: {error}") + ) + continue - if reference: - setattr(instance, field_name, reference) - bulk.add(instance, field_name) + bulk.bulk_update() + + # Popula as tabelas sem modelos correspondentes + if tables_without_models: + # Desabilita constraints para tabelas sem modelos + self.disable_constraints(tables_without_models) + bulk = BulkUpdate() + + self.stdout.write(self.style.WARNING("Populating tables without models...")) + for table_name in tables_without_models: + data = self.load_table_data(table_name) + if not data: + self.stdout.write(self.style.WARNING(f"No data found for {table_name}")) + continue + + self.stdout.write(self.style.SUCCESS(f"Populating {table_name}")) + + for item in tqdm(data, desc=f"Creating instance of {table_name}"): + try: + self.create_instance(None, item, bulk, table_name=table_name) + except Exception as error: + self.stdout.write( + self.style.ERROR(f"Erro ao criar instância de {table_name}: {error}") + ) + continue - bulk.bulk_update() + bulk.bulk_update() + self.enable_constraints(tables_without_models) + self.enable_constraints(all_models) self.stdout.write(self.style.SUCCESS("Data populated")) diff --git a/backend/settings/local.py b/backend/settings/local.py index e2d446fc..a8a9cb81 100644 --- a/backend/settings/local.py +++ b/backend/settings/local.py @@ -29,8 +29,12 @@ # https://docs.djangoproject.com/en/4.0/ref/settings/#databases DATABASES = { "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": BASE_DIR / "api.sqlite3", # noqa + "ENGINE": "django.db.backends.postgresql_psycopg2", + "NAME": getenv("DB_NAME"), + "USER": getenv("DB_USER"), + "PASSWORD": getenv("DB_PASSWORD"), + "HOST": getenv("DB_HOST"), + "PORT": getenv("DB_PORT"), } } From 2f2acb6569ac93780695b13163073258bef1c394 Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Thu, 10 Apr 2025 10:59:48 -0300 Subject: [PATCH 011/181] WIP: Created basic chatbot backend endpoints --- backend/apps/chatbot/models.py | 20 ++++++----- backend/apps/chatbot/urls.py | 40 +++++++++------------ backend/apps/chatbot/views.py | 64 +++++++++++++++++++--------------- 3 files changed, 65 insertions(+), 59 deletions(-) diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index 104864c4..043e9ab1 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -6,20 +6,24 @@ from backend.apps.account.models import User -class ChatInteraction(models.Model): +class Thread(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - user = models.ForeignKey(User, on_delete=models.CASCADE) + user_id = models.ForeignKey(User, on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True) + +class MessagePair(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + thread_id = models.ForeignKey(Thread, on_delete=models.CASCADE) question = models.TextField() answer = models.TextField() generated_queries = models.JSONField(null=True, blank=True) + generated_visual_elements = models.JSONField(null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True) - model_url = models.URLField(null=True, blank=True) - + model_uri = models.TextField() class Feedback(models.Model): - 
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - chat_interaction = models.ForeignKey(ChatInteraction, on_delete=models.CASCADE) - number = models.IntegerField(null=True, blank=True) - comment = models.TextField(null=True, blank=True) + message_pair_id = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=True) + rating = models.SmallIntegerField(choices=[(-1, "Bad"), (1, "Good")]) + comment = models.TextField(blank=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index 1fa130b9..56ca4778 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -2,37 +2,31 @@ from django.urls import path from .views import ( - ChatbotAskView, - ChatInteractionSaveView, - ClearAssistantMemoryView, - FeedbackSaveView, - FeedbackUpdateView, + ChatbotThreadListView, + ThreadDetailView, + MessageView, + FeedbackView, ) urlpatterns = [ path( - "chatbot/ask", - ChatbotAskView.as_view(), - name="chatbot_ask", + "chatbot/threads/", + ChatbotThreadListView.as_view(), + name="chatbot_threads", ), path( - "chatbot/interactions/save", - ChatInteractionSaveView.as_view(), - name="save_chat_interaction", + "chatbot/threads//", + ThreadDetailView.as_view(), + name="chatbot_thread", ), path( - "chatbot/feedback/save", - FeedbackSaveView.as_view(), - name="save_feedback", + "chatbot/threads//message", + MessageView.as_view(), + name="chatbot_thread", ), path( - "chatbot/feedback/update", - FeedbackUpdateView.as_view(), - name="update_feedback", - ), - path( - "chatbot/memory/clear", - ClearAssistantMemoryView.as_view(), - name="clear_assistant_memory", - ), + "chatbot/message-pairs//feedback", + FeedbackView.as_view(), + name="chatbot_message_pair_feedback", + ) ] diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index f103c2f6..cf8c80a1 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -1,32 +1,40 @@ # -*- coding: utf-8 -*- -from rest_framework.views import APIView +from django.http import JsonResponse +from django.views import View +from backend.apps.chatbot.models import Feedback, MessagePair, Thread -class ChatbotAskView(APIView): +class ChatbotThreadListView(View): + def get(self, request): + threads = Thread.objects.filter(user=request.user) + return JsonResponse(threads) + def post(self, request): - # Implementation for handling questions - pass - - -class ChatInteractionSaveView(APIView): - def post(self, request): - # Implementation for saving chat interactions - pass - - -class FeedbackSaveView(APIView): - def post(self, request): - # Implementation for saving feedback - pass - - -class FeedbackUpdateView(APIView): - def put(self, request): - # Implementation for updating feedback - pass - - -class ClearAssistantMemoryView(APIView): - def post(self, request): - # Implementation for clearing chat history - pass + thread = Thread.objects.create(user=request.user) + return JsonResponse(thread) + + +class ThreadDetailView(View): + def get(self, request, thread_id): + thread = Thread.objects.get(id=thread_id) + thread.messages.all() + return JsonResponse(thread) + +class MessageView(View): + def post(self, request, thread_id): + thread = Thread.objects.get(id=thread_id) + question = request.POST.get("message") + answer = "Resposta do chatbot" # TODO: call chatbot + # TODO: stream results + message_pair = MessagePair.objects.create(thread=thread, 
question=question, answer=answer) + return JsonResponse(message_pair) + + +class FeedbackView(View): + def put(self, request, message_pair_id): + feedback = Feedback.objects.update_or_create( + message_pair=message_pair_id, + rating=request.POST.get("rating"), + comment=request.POST.get("comment"), + ) + return JsonResponse(feedback) From 3102f0b7e3669eb46899b62fc08109c610d2aa89 Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Thu, 10 Apr 2025 11:39:50 -0300 Subject: [PATCH 012/181] Adding migrations and fixing admin --- backend/apps/chatbot/admin.py | 19 ++++--- .../apps/chatbot/migrations/0001_initial.py | 53 +++++++++++++++++++ backend/apps/chatbot/migrations/__init__.py | 0 backend/apps/chatbot/models.py | 8 +-- backend/settings/base.py | 1 + backend/settings/local.py | 8 ++- docker-compose.yaml | 12 +++-- start-server.sh | 5 ++ 8 files changed, 89 insertions(+), 17 deletions(-) create mode 100644 backend/apps/chatbot/migrations/0001_initial.py create mode 100644 backend/apps/chatbot/migrations/__init__.py diff --git a/backend/apps/chatbot/admin.py b/backend/apps/chatbot/admin.py index 1c21715f..0daadff1 100644 --- a/backend/apps/chatbot/admin.py +++ b/backend/apps/chatbot/admin.py @@ -1,12 +1,20 @@ # -*- coding: utf-8 -*- from django.contrib import admin -from .models import ChatInteraction, Feedback +from .models import Feedback, MessagePair, Thread -class ChatInteractionAdmin(admin.ModelAdmin): +class ThreadAdmin(admin.ModelAdmin): list_display = [ + "id", + "created_at", + ] + +class MessagePairAdmin(admin.ModelAdmin): + list_display = [ + "id", "question", + "answer", "created_at", ] search_fields = [ @@ -21,14 +29,12 @@ class ChatInteractionAdmin(admin.ModelAdmin): class FeedbackAdmin(admin.ModelAdmin): list_display = [ - "chat_interaction", + "message_pair_id", "rating", "created_at", "updated_at", ] search_fields = [ - "comment", - "chat_interaction__question", ] readonly_fields = [ "created_at", @@ -37,5 +43,6 @@ class FeedbackAdmin(admin.ModelAdmin): ordering = ["-created_at"] -admin.site.register(ChatInteraction, ChatInteractionAdmin) +admin.site.register(Thread, ThreadAdmin) +admin.site.register(MessagePair, MessagePairAdmin) admin.site.register(Feedback, FeedbackAdmin) diff --git a/backend/apps/chatbot/migrations/0001_initial.py b/backend/apps/chatbot/migrations/0001_initial.py new file mode 100644 index 00000000..1b9dfda8 --- /dev/null +++ b/backend/apps/chatbot/migrations/0001_initial.py @@ -0,0 +1,53 @@ +# Generated by Django 4.2.20 on 2025-04-10 14:32 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='MessagePair', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('question', models.TextField()), + ('answer', models.TextField()), + ('generated_queries', models.JSONField(blank=True, null=True)), + ('generated_visual_elements', models.JSONField(blank=True, null=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('model_uri', models.TextField()), + ], + ), + migrations.CreateModel( + name='Feedback', + fields=[ + ('message_pair', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='chatbot.messagepair')), + ('rating', models.SmallIntegerField(choices=[(-1, 'Bad'), (1, 
'Good')])), + ('comment', models.TextField(blank=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='Thread', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.AddField( + model_name='messagepair', + name='thread', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatbot.thread'), + ), + ] diff --git a/backend/apps/chatbot/migrations/__init__.py b/backend/apps/chatbot/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index 043e9ab1..f324fbdd 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -3,17 +3,17 @@ from django.db import models -from backend.apps.account.models import User +from backend.apps.account.models import Account class Thread(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - user_id = models.ForeignKey(User, on_delete=models.CASCADE) + user = models.ForeignKey(Account, on_delete=models.CASCADE) created_at = models.DateTimeField(auto_now_add=True) class MessagePair(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - thread_id = models.ForeignKey(Thread, on_delete=models.CASCADE) + thread = models.ForeignKey(Thread, on_delete=models.CASCADE) question = models.TextField() answer = models.TextField() generated_queries = models.JSONField(null=True, blank=True) @@ -22,7 +22,7 @@ class MessagePair(models.Model): model_uri = models.TextField() class Feedback(models.Model): - message_pair_id = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=True) + message_pair = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=True) rating = models.SmallIntegerField(choices=[(-1, "Bad"), (1, "Good")]) comment = models.TextField(blank=True) created_at = models.DateTimeField(auto_now_add=True) diff --git a/backend/settings/base.py b/backend/settings/base.py index 72e25fbf..046f8f7e 100644 --- a/backend/settings/base.py +++ b/backend/settings/base.py @@ -61,6 +61,7 @@ "django_extensions", "huey.contrib.djhuey", # + "backend.apps.chatbot", "backend.apps.account", "backend.apps.account_auth", "backend.apps.account_payment.apps.PaymentConfig", diff --git a/backend/settings/local.py b/backend/settings/local.py index e2d446fc..a8a9cb81 100644 --- a/backend/settings/local.py +++ b/backend/settings/local.py @@ -29,8 +29,12 @@ # https://docs.djangoproject.com/en/4.0/ref/settings/#databases DATABASES = { "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": BASE_DIR / "api.sqlite3", # noqa + "ENGINE": "django.db.backends.postgresql_psycopg2", + "NAME": getenv("DB_NAME"), + "USER": getenv("DB_USER"), + "PASSWORD": getenv("DB_PASSWORD"), + "HOST": getenv("DB_HOST"), + "PORT": getenv("DB_PORT"), } } diff --git a/docker-compose.yaml b/docker-compose.yaml index 9495c9c4..e95a66e9 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,8 +1,6 @@ ---- -version: '3' services: index: - image: docker.elastic.co/elasticsearch/elasticsearch:7.17.3 + image: elasticsearch:8.17.2 # TODO : upgrade prod to elasticsearch:8.17.2 container_name: index environment: - 
cluster.name=docker-cluster @@ -51,7 +49,11 @@ services: dockerfile: Dockerfile container_name: api env_file: [.env.docker] - ports: [8080:80] + volumes: + - .:/app + ports: + - "8000:8000" # Porta da api + - "5678:5678" # Porta de debug depends_on: index: condition: service_healthy @@ -68,4 +70,4 @@ services: restart: unless-stopped volumes: esdata: - pgdata: + pgdata: \ No newline at end of file diff --git a/start-server.sh b/start-server.sh index 917307f4..ab59c9b2 100755 --- a/start-server.sh +++ b/start-server.sh @@ -1,9 +1,14 @@ #!/usr/bin/env bash # start-server.sh +echo "> Making migrations" (cd /app; python manage.py makemigrations) +echo "> Migrating" (cd /app; python manage.py migrate) +echo "> Creating superuser" if [ -n "$DJANGO_SUPERUSER_USERNAME" ] && [ -n "$DJANGO_SUPERUSER_PASSWORD" ] ; then (cd /app; python manage.py createsuperuser --no-input) fi +echo "> Running Huey" (cd /app; python manage.py run_huey &) +echo "> Running Gunicorn" (cd /app; gunicorn backend.wsgi --user www-data --bind 0.0.0.0:8000 --workers 3 --timeout 180) & nginx -g "daemon off;" From 5fd81b31c3989e952aabd79e14a13cf4434d466c Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Thu, 10 Apr 2025 12:27:12 -0300 Subject: [PATCH 013/181] Fixing stuff and adding todos --- .../apps/chatbot/migrations/0001_initial.py | 4 +- backend/apps/chatbot/models.py | 2 +- backend/apps/chatbot/urls.py | 24 ++--------- backend/apps/chatbot/views.py | 41 ++++++++++++------- backend/urls.py | 1 + 5 files changed, 34 insertions(+), 38 deletions(-) diff --git a/backend/apps/chatbot/migrations/0001_initial.py b/backend/apps/chatbot/migrations/0001_initial.py index 1b9dfda8..3289060f 100644 --- a/backend/apps/chatbot/migrations/0001_initial.py +++ b/backend/apps/chatbot/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.20 on 2025-04-10 14:32 +# Generated by Django 4.2.20 on 2025-04-10 15:00 from django.conf import settings from django.db import migrations, models @@ -42,7 +42,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), ('created_at', models.DateTimeField(auto_now_add=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.AddField( diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index f324fbdd..ebb515e6 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -8,7 +8,7 @@ class Thread(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - user = models.ForeignKey(Account, on_delete=models.CASCADE) + account = models.ForeignKey(Account, on_delete=models.CASCADE) created_at = models.DateTimeField(auto_now_add=True) class MessagePair(models.Model): diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index 56ca4778..67e96c05 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -9,24 +9,8 @@ ) urlpatterns = [ - path( - "chatbot/threads/", - ChatbotThreadListView.as_view(), - name="chatbot_threads", - ), - path( - "chatbot/threads//", - ThreadDetailView.as_view(), - name="chatbot_thread", - ), - path( - "chatbot/threads//message", - MessageView.as_view(), - name="chatbot_thread", - ), - path( - "chatbot/message-pairs//feedback", - FeedbackView.as_view(), - name="chatbot_message_pair_feedback", 
- ) + path( "chatbot/threads/", ChatbotThreadListView.as_view(),), + path( "chatbot/threads//", ThreadDetailView.as_view(),), + path( "chatbot/threads//message", MessageView.as_view(),), + path( "chatbot/message-pairs//feedback", FeedbackView.as_view(),) ] diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index cf8c80a1..cb6c04d4 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -3,38 +3,49 @@ from django.views import View from backend.apps.chatbot.models import Feedback, MessagePair, Thread +# TODO: add authentication (using this login_required decorator + checking user id) +# TODO: add error handling (404 wrong thread if, etc...) +# TODO: To test this, create a test user in a migration + +from django.contrib.auth.decorators import login_required +from django.utils.decorators import method_decorator class ChatbotThreadListView(View): - def get(self, request): - threads = Thread.objects.filter(user=request.user) - return JsonResponse(threads) + def get(self, request, *args, **kwargs): + threads = Thread.objects.filter(account=request.user.id) + return JsonResponse([thread.to_dict() for thread in threads], safe=False) - def post(self, request): - thread = Thread.objects.create(user=request.user) - return JsonResponse(thread) - + def post(self, request, *args, **kwargs): + thread = Thread.objects.create(account=request.user.id) + return JsonResponse(thread.to_dict()) class ThreadDetailView(View): - def get(self, request, thread_id): + def get(self, request, thread_id, *args, **kwargs): thread = Thread.objects.get(id=thread_id) - thread.messages.all() - return JsonResponse(thread) + if thread.account != request.user.id: + return JsonResponse({"error": "You are not authorized to access this thread"}, status=403) + messages = thread.messages.all() + return JsonResponse(messages) class MessageView(View): - def post(self, request, thread_id): + def post(self, request, thread_id, *args, **kwargs): thread = Thread.objects.get(id=thread_id) + if thread.account_id != request.user.id: + return JsonResponse({"error": "You are not authorized to access this thread"}, status=403) question = request.POST.get("message") answer = "Resposta do chatbot" # TODO: call chatbot - # TODO: stream results + # TODO (nice to have): stream results message_pair = MessagePair.objects.create(thread=thread, question=question, answer=answer) return JsonResponse(message_pair) - class FeedbackView(View): - def put(self, request, message_pair_id): + def put(self, request, message_pair_id, *args, **kwargs): + message_pair = MessagePair.objects.get(id=message_pair_id) + if message_pair.thread.account_id != request.user.id: + return JsonResponse({"error": "You are not authorized to access this thread"}, status=403) feedback = Feedback.objects.update_or_create( message_pair=message_pair_id, rating=request.POST.get("rating"), comment=request.POST.get("comment"), ) - return JsonResponse(feedback) + return JsonResponse(feedback) \ No newline at end of file diff --git a/backend/urls.py b/backend/urls.py index 9c86c1df..6758bbb4 100644 --- a/backend/urls.py +++ b/backend/urls.py @@ -26,5 +26,6 @@ path("", include("backend.apps.account.urls")), path("", include("backend.apps.account_auth.urls")), path("", include("backend.apps.account_payment.urls")), + path("", include("backend.apps.chatbot.urls")), ] urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) From 1e9a1cd51a613c8933c1845f421fe4123d8e7c71 Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Fri, 11 
Apr 2025 02:12:23 -0300 Subject: [PATCH 014/181] Bad set to 0 --- backend/apps/chatbot/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index ebb515e6..820bd1d8 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -23,7 +23,7 @@ class MessagePair(models.Model): class Feedback(models.Model): message_pair = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=True) - rating = models.SmallIntegerField(choices=[(-1, "Bad"), (1, "Good")]) + rating = models.SmallIntegerField(choices=[(0, "Bad"), (1, "Good")]) comment = models.TextField(blank=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) From 16ada8747dbc6a44c64329d4562500ba3ff37461 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 17:01:05 -0300 Subject: [PATCH 015/181] added feedback id for tracking on langsmith --- backend/apps/chatbot/models.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index 820bd1d8..7aaa5866 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -14,15 +14,16 @@ class Thread(models.Model): class MessagePair(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) thread = models.ForeignKey(Thread, on_delete=models.CASCADE) - question = models.TextField() - answer = models.TextField() + model_uri = models.TextField() + user_message = models.TextField() + assistant_message = models.TextField() generated_queries = models.JSONField(null=True, blank=True) - generated_visual_elements = models.JSONField(null=True, blank=True) + generated_chart = models.JSONField(null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True) - model_uri = models.TextField() class Feedback(models.Model): - message_pair = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=True) + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + message_pair = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=False) rating = models.SmallIntegerField(choices=[(0, "Bad"), (1, "Good")]) comment = models.TextField(blank=True) created_at = models.DateTimeField(auto_now_add=True) From 0905a2b040c4efe31cbcaf032ea511426caeb460 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 17:02:10 -0300 Subject: [PATCH 016/181] renamed `ChatbotThreadListView` to `ThreadListView` and created `CheckpointView` for deleting checkpoints --- backend/apps/chatbot/urls.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index 67e96c05..244c8d73 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -2,15 +2,17 @@ from django.urls import path from .views import ( - ChatbotThreadListView, + ThreadListView, ThreadDetailView, MessageView, FeedbackView, + CheckpointView ) urlpatterns = [ - path( "chatbot/threads/", ChatbotThreadListView.as_view(),), - path( "chatbot/threads//", ThreadDetailView.as_view(),), - path( "chatbot/threads//message", MessageView.as_view(),), - path( "chatbot/message-pairs//feedback", FeedbackView.as_view(),) + path("chatbot/threads/", ThreadListView.as_view()), + path("chatbot/threads//", ThreadDetailView.as_view()), + path("chatbot/threads//message", MessageView.as_view()), + 
path("chatbot/message-pairs//feedback", FeedbackView.as_view()), + path("chatbot/checkpoints//", CheckpointView.as_view()) ] From 5968092d112bb024026b1064aa740044b765f244 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 17:03:12 -0300 Subject: [PATCH 017/181] refactored views --- backend/apps/chatbot/views.py | 116 ++++++++++++++++++++++++++-------- 1 file changed, 89 insertions(+), 27 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index cb6c04d4..008fc60b 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- -from django.http import JsonResponse +import json + +from django.http import HttpRequest, HttpResponse, JsonResponse from django.views import View -from backend.apps.chatbot.models import Feedback, MessagePair, Thread # TODO: add authentication (using this login_required decorator + checking user id) # TODO: add error handling (404 wrong thread if, etc...) @@ -9,43 +10,104 @@ from django.contrib.auth.decorators import login_required from django.utils.decorators import method_decorator +from django.views.decorators.csrf import csrf_exempt + +from backend.apps.chatbot.models import Feedback, MessagePair, Thread +from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage +from chatbot.databases import BigQueryDatabase + + +database = BigQueryDatabase() + +assistant = SQLAssistant(database=database) + -class ChatbotThreadListView(View): - def get(self, request, *args, **kwargs): - threads = Thread.objects.filter(account=request.user.id) - return JsonResponse([thread.to_dict() for thread in threads], safe=False) - - def post(self, request, *args, **kwargs): - thread = Thread.objects.create(account=request.user.id) +class ThreadListView(View): + def get(self, request: HttpRequest, *args, **kwargs): + threads = Thread.objects.filter(account=request.user) + return JsonResponse({"threads": [thread.to_dict() for thread in threads]}) + + def post(self, request: HttpRequest, *args, **kwargs): + thread = Thread.objects.create(account=request.user) return JsonResponse(thread.to_dict()) class ThreadDetailView(View): - def get(self, request, thread_id, *args, **kwargs): - thread = Thread.objects.get(id=thread_id) - if thread.account != request.user.id: - return JsonResponse({"error": "You are not authorized to access this thread"}, status=403) - messages = thread.messages.all() - return JsonResponse(messages) + def get(self, request: HttpRequest, thread_id: str, *args, **kwargs): + try: + thread = Thread.objects.get(id=thread_id) + except Thread.DoesNotExist: + return HttpResponse(404) + + if thread.account.uuid != request.user.id: + return JsonResponse( + data={"error": "You are not authorized to access this thread"}, + status=403 + ) + + messages = MessagePair.objects.filter(thread=thread) + + return JsonResponse({"messages": [message.to_dict() for message in messages]}) class MessageView(View): - def post(self, request, thread_id, *args, **kwargs): + def post(self, request: HttpRequest, thread_id: str, *args, **kwargs): thread = Thread.objects.get(id=thread_id) - if thread.account_id != request.user.id: - return JsonResponse({"error": "You are not authorized to access this thread"}, status=403) - question = request.POST.get("message") - answer = "Resposta do chatbot" # TODO: call chatbot + + if thread.account.uuid != request.user.id: + return JsonResponse( + data={"error": "You are not authorized to access this thread"}, + status=403 + ) + + user_message 
= json.loads(request.body.decode("utf-8")) + user_message = UserMessage(**user_message) + + assistant_response: SQLAssistantMessage = assistant.invoke( + message=user_message, + thread_id=thread_id + ) + # TODO (nice to have): stream results - message_pair = MessagePair.objects.create(thread=thread, question=question, answer=answer) + message_pair = MessagePair.objects.create( + id=assistant_response.id, + thread=thread_id, + model_uri=assistant_response.model_uri, + user_message=user_message.content, + assistant_message=assistant_response.content, + generated_queries=assistant_response.sql_queries, + ) + return JsonResponse(message_pair) class FeedbackView(View): - def put(self, request, message_pair_id, *args, **kwargs): + def put(self, request: HttpRequest, message_pair_id: str, *args, **kwargs): message_pair = MessagePair.objects.get(id=message_pair_id) - if message_pair.thread.account_id != request.user.id: - return JsonResponse({"error": "You are not authorized to access this thread"}, status=403) + + if message_pair.thread.account.uuid != request.user.id: + return JsonResponse( + data={"error": "You are not authorized to access this thread"}, + status=403 + ) + + feedback: dict = json.loads(request.body.decode("utf-8")) + feedback = Feedback.objects.update_or_create( message_pair=message_pair_id, - rating=request.POST.get("rating"), - comment=request.POST.get("comment"), + rating=feedback["rating"], + comment=feedback["comment"], ) - return JsonResponse(feedback) \ No newline at end of file + + return JsonResponse(feedback) + +class CheckpointView(View): + def delete(self, request: HttpRequest, thread_id: str, *args, **kwargs): + thread = Thread.objects.get(id=thread_id) + + if thread.account.uuid != request.user.id: + return JsonResponse( + data={"error": "You are not authorized to access this thread"}, + status=403 + ) + + assistant.clear_thread(thread_id) + + return HttpResponse(200) From c058b3535867dec5c471be9f5f8b3882114be49e Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 17:03:48 -0300 Subject: [PATCH 018/181] added chatbot env variables --- .env.docker | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.env.docker b/.env.docker index 7f733362..57c98f36 100644 --- a/.env.docker +++ b/.env.docker @@ -20,3 +20,11 @@ REDIS_HOST="queue" REDIS_PORT="6379" # Index ELASTICSEARCH_URL=http://index:9200 +# Chatbot +GOOGLE_APPLICATION_CREDENTIALS= +BILLING_PROJECT_ID= +QUERY_PROJECT_ID= +MODEL_URI= +OPENAI_API_KEY= +LANGCHAIN_TRACING_V2= +LANGCHAIN_API_KEY= From c057bf3568d6dfbaa93e3deb074883f0481e311e Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 17:13:29 -0300 Subject: [PATCH 019/181] temp: added a script for running django in dev mode and updated the Dockerfile for installing the `chatbot` package --- Dockerfile | 13 ++++--- docker-compose.override.yaml | 75 ++++++++++++++++++++++++++++++++++++ start-dev.sh | 17 ++++++++ 3 files changed, 99 insertions(+), 6 deletions(-) create mode 100644 docker-compose.override.yaml create mode 100755 start-dev.sh diff --git a/Dockerfile b/Dockerfile index 0f22234e..efe4327c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,19 +6,20 @@ FROM python:${PYTHON_VERSION} RUN pip install --no-cache-dir -U virtualenv>=20.13.1 && virtualenv /env --python=python3.11 ENV PATH /env/bin:$PATH -# Install pip requirements -WORKDIR /app -COPY . . -RUN /env/bin/pip install --no-cache-dir . 
&& rm nginx.conf - # Install make, nginx and copy configuration RUN apt-get update \ - && apt-get install -y --no-install-recommends curl make nginx \ + && apt-get install -y --no-install-recommends curl libpq-dev make nginx \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && rm /etc/nginx/sites-enabled/default COPY nginx.conf /etc/nginx/nginx.conf +# Install pip requirements +WORKDIR /app +COPY . . +RUN /env/bin/pip install --no-cache-dir . && rm nginx.conf +RUN /env/bin/pip install --no-cache-dir ./chatbot/chatbot + # Prevents Python from writing .pyc files to disc # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE ENV PYTHONDONTWRITEBYTECODE 1 diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml new file mode 100644 index 00000000..98b7d21f --- /dev/null +++ b/docker-compose.override.yaml @@ -0,0 +1,75 @@ +services: + index: + image: elasticsearch:8.17.2 # TODO : upgrade prod to elasticsearch:8.17.2 + container_name: index + environment: + - cluster.name=docker-cluster + - bootstrap.memory_lock=true + - discovery.type=single-node + - xpack.security.enabled=false + - ES_JAVA_OPTS=-Xms512m -Xmx512m + ulimits: + memlock: + soft: -1 + hard: -1 + volumes: [esdata:/usr/share/elasticsearch/data] + ports: [9200:9200, 9300:9300] + healthcheck: + test: [CMD-SHELL, curl -s http://localhost:9200 >/dev/null || exit 1] + interval: 1m + timeout: 20s + retries: 5 + start_period: 1m + queue: + image: redis:6.0 + container_name: queue + ports: [6379:6379] + healthcheck: + test: [CMD, redis-cli, ping] + interval: 10s + timeout: 5s + retries: 3 + database: + image: postgres:14 + container_name: database + environment: [POSTGRES_USER=postgres, POSTGRES_HOST_AUTH_METHOD=trust] + env_file: [.env.docker] + ports: [5432:5432] + volumes: [pgdata:/var/lib/postgresql/data] + healthcheck: + test: [CMD, pg_isready, -U, postgres] + interval: 1m + timeout: 30s + retries: 5 + start_period: 1m + restart: unless-stopped + api: + build: + context: . 
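+      # NOTE: the repo root is used as the build context, presumably so the
+      # Dockerfile can also install the local ./chatbot/chatbot package.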
+ dockerfile: Dockerfile + container_name: api + env_file: [.env.docker] + command: ["/app/start-dev.sh"] + volumes: + - .:/app + - $HOME/.config/pydata:$HOME/.config/pydata + ports: + - "8000:8000" # Porta da api + - "5678:5678" # Porta de debug + depends_on: + index: + condition: service_healthy + queue: + condition: service_healthy + database: + condition: service_healthy + healthcheck: + test: [CMD, curl, -f, http://localhost/healthcheck/] + interval: 1m + timeout: 30s + retries: 5 + start_period: 30s + restart: unless-stopped +volumes: + esdata: + pgdata: \ No newline at end of file diff --git a/start-dev.sh b/start-dev.sh new file mode 100755 index 00000000..30c6ec44 --- /dev/null +++ b/start-dev.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +# start-server.sh +echo "> Making migrations" +(cd /app; python manage.py makemigrations) +echo "> Migrating" +(cd /app; python manage.py migrate) +echo "> Installing debugpy" +pip install debugpy +echo "> Creating superuser" +if [ -n "$DJANGO_SUPERUSER_USERNAME" ] && [ -n "$DJANGO_SUPERUSER_PASSWORD" ] ; then +(cd /app; python manage.py createsuperuser --no-input) +fi +echo "> Running Huey" +(cd /app; python manage.py run_huey &) +echo "> Running server in development mode" +# Start the server in development mode with django +(cd /app; python -m debugpy --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000) From 2d2262c60c3bc09b5f37e3e4bfcd0703eaee07a7 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 17:14:01 -0300 Subject: [PATCH 020/181] updated migrations --- .../apps/chatbot/migrations/0001_initial.py | 35 +++++++++---------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/backend/apps/chatbot/migrations/0001_initial.py b/backend/apps/chatbot/migrations/0001_initial.py index 3289060f..ce94391e 100644 --- a/backend/apps/chatbot/migrations/0001_initial.py +++ b/backend/apps/chatbot/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.20 on 2025-04-10 15:00 +# Generated by Django 4.2.20 on 2025-04-14 17:19 from django.conf import settings from django.db import migrations, models @@ -16,38 +16,35 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='MessagePair', + name='Thread', fields=[ ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ('question', models.TextField()), - ('answer', models.TextField()), - ('generated_queries', models.JSONField(blank=True, null=True)), - ('generated_visual_elements', models.JSONField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True)), - ('model_uri', models.TextField()), + ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='Feedback', + name='MessagePair', fields=[ - ('message_pair', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='chatbot.messagepair')), - ('rating', models.SmallIntegerField(choices=[(-1, 'Bad'), (1, 'Good')])), - ('comment', models.TextField(blank=True)), + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('model_uri', models.TextField()), + ('user_message', models.TextField()), + ('assistant_message', models.TextField()), + ('generated_queries', models.JSONField(blank=True, null=True)), + ('generated_chart', models.JSONField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', 
models.DateTimeField(auto_now=True)), + ('thread', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatbot.thread')), ], ), migrations.CreateModel( - name='Thread', + name='Feedback', fields=[ ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('rating', models.SmallIntegerField(choices=[(0, 'Bad'), (1, 'Good')])), + ('comment', models.TextField(blank=True)), ('created_at', models.DateTimeField(auto_now_add=True)), - ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('message_pair', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='chatbot.messagepair')), ], ), - migrations.AddField( - model_name='messagepair', - name='thread', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatbot.thread'), - ), ] From 8ef6d4559ba34c8ce533a31c2e69c1fc7e9e6b84 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 14 Apr 2025 18:35:23 -0300 Subject: [PATCH 021/181] fixed django admin --- backend/apps/chatbot/admin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/apps/chatbot/admin.py b/backend/apps/chatbot/admin.py index 0daadff1..37decafd 100644 --- a/backend/apps/chatbot/admin.py +++ b/backend/apps/chatbot/admin.py @@ -13,13 +13,13 @@ class ThreadAdmin(admin.ModelAdmin): class MessagePairAdmin(admin.ModelAdmin): list_display = [ "id", - "question", - "answer", + "user_message", + "assistant_message", "created_at", ] search_fields = [ - "question", - "answer", + "user_message", + "assistant_message", ] readonly_fields = [ "created_at", From be70b56547bb4dba44d84f20ec820fed6bd8cf50 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 15 Apr 2025 17:10:06 -0300 Subject: [PATCH 022/181] added `djangorestframework` and `djangorestframework-simplejwt` packages as dependencies --- backend/settings/base.py | 16 +++- poetry.lock | 192 +++++++++++++++++++++++++++++++++++---- pyproject.toml | 2 + 3 files changed, 189 insertions(+), 21 deletions(-) diff --git a/backend/settings/base.py b/backend/settings/base.py index 046f8f7e..b88d6b0e 100644 --- a/backend/settings/base.py +++ b/backend/settings/base.py @@ -61,14 +61,28 @@ "django_extensions", "huey.contrib.djhuey", # - "backend.apps.chatbot", + "rest_framework", + "rest_framework_simplejwt", + # "backend.apps.account", "backend.apps.account_auth", "backend.apps.account_payment.apps.PaymentConfig", "backend.apps.api.v1", "backend.apps.core", + "backend.apps.chatbot", ] +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": ( + "rest_framework_simplejwt.authentication.JWTAuthentication", + ), +} + +SIMPLE_JWT = { + 'ACCESS_TOKEN_LIFETIME': timedelta(days=1), + 'REFRESH_TOKEN_LIFETIME': timedelta(days=7), +} + MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", diff --git a/poetry.lock b/poetry.lock index 27b60198..728c390f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "aniso8601" @@ -6,6 +6,7 @@ version = "9.0.1" description = "A library for parsing ISO 8601 strings." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, @@ -20,6 +21,7 @@ version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, @@ -31,6 +33,7 @@ version = "3.7.2" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, @@ -48,6 +51,8 @@ version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_full_version <= \"3.11.2\"" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, @@ -59,6 +64,7 @@ version = "5.3.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, @@ -70,6 +76,7 @@ version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, @@ -81,6 +88,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -92,6 +100,7 @@ version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -191,6 +200,7 @@ version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -205,10 +215,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -216,6 +228,7 @@ version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, @@ -275,7 +288,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "db-dtypes" @@ -283,6 +296,7 @@ version = "1.2.0" description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "db-dtypes-1.2.0.tar.gz", hash = "sha256:3531bb1fb8b5fbab33121fe243ccc2ade16ab2524f4c113b05cc702a1908e6ea"}, {file = "db_dtypes-1.2.0-py2.py3-none-any.whl", hash = "sha256:6320bddd31d096447ef749224d64aab00972ed20e4392d86f7d8b81ad79f7ff0"}, @@ -300,6 +314,7 @@ version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, @@ -311,6 +326,7 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -322,6 +338,7 @@ version = "2.8.3" description = "Django + Stripe made easy" optional = false python-versions = ">=3.8.0,<4.0.0" +groups = ["main"] files = [ {file = 
"dj_stripe-2.8.3-py3-none-any.whl", hash = "sha256:4d442f43dd016ba89af6db3fd790673b61e9d36813f45e5964d471997e9039be"}, {file = "dj_stripe-2.8.3.tar.gz", hash = "sha256:f5205a3f2baa7cd5b858b2250200fb85e48616d9418218b2c9ab9b82747c31d4"}, @@ -341,6 +358,7 @@ version = "4.2.10" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Django-4.2.10-py3-none-any.whl", hash = "sha256:a2d4c4d4ea0b6f0895acde632071aff6400bfc331228fc978b05452a0ff3e9f1"}, {file = "Django-4.2.10.tar.gz", hash = "sha256:b1260ed381b10a11753c73444408e19869f3241fc45c985cd55a30177c789d13"}, @@ -361,6 +379,7 @@ version = "3.14.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django_cors_headers-3.14.0-py3-none-any.whl", hash = "sha256:684180013cc7277bdd8702b80a3c5a4b3fcae4abb2bf134dceb9f5dfe300228e"}, {file = "django_cors_headers-3.14.0.tar.gz", hash = "sha256:5fbd58a6fb4119d975754b2bc090f35ec160a8373f276612c675b00e8a138739"}, @@ -375,6 +394,7 @@ version = "3.2.3" description = "Extensions for Django" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, @@ -389,6 +409,7 @@ version = "22.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django-filter-22.1.tar.gz", hash = "sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5"}, {file = "django_filter-22.1-py3-none-any.whl", hash = "sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb"}, @@ -403,6 +424,7 @@ version = "0.3.4" description = "JSON Web Token for Django GraphQL." optional = false python-versions = ">=3.6,<4.0" +groups = ["main"] files = [ {file = "django-graphql-jwt-0.3.4.tar.gz", hash = "sha256:654808417a1fa97e4d489766b61046fa8006f58dfad1c44cc3a37a9e4929203b"}, {file = "django_graphql_jwt-0.3.4-py3-none-any.whl", hash = "sha256:fb20194bda649b2b1b49049ef84d0e957851df485fba7f1901aace54ca328063"}, @@ -420,6 +442,7 @@ version = "3.2.1" description = "Pluggable search for Django." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-haystack-3.2.1.tar.gz", hash = "sha256:97e3197aefc225fe405b6f17600a2534bf827cb4d6743130c20bc1a06f7293a4"}, ] @@ -437,6 +460,7 @@ version = "3.18.1" description = "Run checks on services like databases, queue servers, celery processes, etc." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django-health-check-3.18.1.tar.gz", hash = "sha256:44552d55ae8950c9548d3b90f9d9fd5570b57446a19b2a8e674c82f993cb7a2c"}, {file = "django_health_check-3.18.1-py2.py3-none-any.whl", hash = "sha256:2c89a326cd79830e2fc6808823a9e7e874ab23f7aef3ff2c4d1194c998e1dca1"}, @@ -455,6 +479,7 @@ version = "2.6.0" description = "Drop-in theme for django admin, that utilises AdminLTE 3 & Bootstrap 4 to make yo' admin look jazzy" optional = false python-versions = ">=3.6.2" +groups = ["main"] files = [ {file = "django_jazzmin-2.6.0-py3-none-any.whl", hash = "sha256:fb554c2d564649c65243b13385121fdbdda58521f49544f9d7cb9c414a4908d4"}, {file = "django_jazzmin-2.6.0.tar.gz", hash = "sha256:5bb07055cf19183030724f976904fd8b6337559727959340a43832fab0531812"}, @@ -469,6 +494,7 @@ version = "0.18.11" description = "Translates Django models using a registration approach." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-modeltranslation-0.18.11.tar.gz", hash = "sha256:a6e2c459e3b31852287d030bc6e29fa28576db97455dccd399fe08ac8e9223b9"}, {file = "django_modeltranslation-0.18.11-py3-none-any.whl", hash = "sha256:81b68e4dc806a3b779ac88babe1cbd99d5318d374a43b3737a65fb0f4c1cffe8"}, @@ -484,6 +510,7 @@ version = "3.7.4" description = "Allows Django models to be ordered and provides a simple admin interface for reordering them." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-ordered-model-3.7.4.tar.gz", hash = "sha256:f258b9762525c00a53009e82f8b8bf2a3aa315e8b453e281e8fdbbfe2b8cb3ba"}, {file = "django_ordered_model-3.7.4-py3-none-any.whl", hash = "sha256:dfcd3183fe0749dad1c9971cba1d6240ce7328742a30ddc92feca41107bb241d"}, @@ -495,6 +522,7 @@ version = "1.14.2" description = "Support for many storage backends in Django" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django-storages-1.14.2.tar.gz", hash = "sha256:51b36af28cc5813b98d5f3dfe7459af638d84428c8df4a03990c7d74d1bea4e5"}, {file = "django_storages-1.14.2-py3-none-any.whl", hash = "sha256:1db759346b52ada6c2efd9f23d8241ecf518813eb31db9e2589207174f58f6ad"}, @@ -513,12 +541,53 @@ libcloud = ["apache-libcloud"] s3 = ["boto3 (>=1.4.4)"] sftp = ["paramiko (>=1.15)"] +[[package]] +name = "djangorestframework" +version = "3.16.0" +description = "Web APIs for Django, made easy." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "djangorestframework-3.16.0-py3-none-any.whl", hash = "sha256:bea7e9f6b96a8584c5224bfb2e4348dfb3f8b5e34edbecb98da258e892089361"}, + {file = "djangorestframework-3.16.0.tar.gz", hash = "sha256:f022ff46613584de994c0c6a4aebbace5fd700555fbe9d33b865ebf173eba6c9"}, +] + +[package.dependencies] +django = ">=4.2" + +[[package]] +name = "djangorestframework-simplejwt" +version = "5.5.0" +description = "A minimal JSON Web Token authentication plugin for Django REST Framework" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "djangorestframework_simplejwt-5.5.0-py3-none-any.whl", hash = "sha256:4ef6b38af20cdde4a4a51d1fd8e063cbbabb7b45f149cc885d38d905c5a62edb"}, + {file = "djangorestframework_simplejwt-5.5.0.tar.gz", hash = "sha256:474a1b737067e6462b3609627a392d13a4da8a08b1f0574104ac6d7b1406f90e"}, +] + +[package.dependencies] +django = ">=4.2" +djangorestframework = ">=3.14" +pyjwt = ">=1.7.1,<2.10.0" + +[package.extras] +crypto = ["cryptography (>=3.3.1)"] +dev = ["Sphinx (>=1.6.5,<2)", "cryptography", "freezegun", "ipython", "pre-commit", "pytest", "pytest-cov", "pytest-django", "pytest-watch", "pytest-xdist", "python-jose (==3.3.0)", "pyupgrade", "ruff", "sphinx_rtd_theme (>=0.1.9)", "tox", "twine", "wheel", "yesqa"] +doc = ["Sphinx (>=1.6.5,<2)", "sphinx_rtd_theme (>=0.1.9)"] +lint = ["pre-commit", "pyupgrade", "ruff", "yesqa"] +python-jose = ["python-jose (==3.3.0)"] +test = ["cryptography", "freezegun", "pytest", "pytest-cov", "pytest-django", "pytest-xdist", "tox"] + [[package]] name = "elasticsearch" version = "7.17.9" description = "Python client for Elasticsearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +groups = ["main"] files = [ {file = "elasticsearch-7.17.9-py2.py3-none-any.whl", hash = "sha256:0e2454645dc00517dee4c6de3863411a9c5f1955d013c5fefa29123dadc92f98"}, {file = "elasticsearch-7.17.9.tar.gz", hash = "sha256:66c4ece2adfe7cc120e2b6a6798a1fd5c777aecf82eec39bb95cef7cfc7ea2b3"}, @@ -540,6 +609,8 @@ version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["test"] +markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, @@ -554,6 +625,7 @@ version = "19.13.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Faker-19.13.0-py3-none-any.whl", hash = "sha256:da880a76322db7a879c848a0771e129338e0a680a9f695fd9a3e7a6ac82b45e1"}, {file = "Faker-19.13.0.tar.gz", hash = "sha256:14ccb0aec342d33aa3889a864a56e5b3c2d56bce1b89f9189f4fbc128b9afc1e"}, @@ -568,6 +640,7 @@ version = "3.13.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, @@ -576,7 +649,7 @@ files = [ [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] +typing = ["typing-extensions (>=4.8) ; python_version < \"3.11\""] [[package]] name = "google-api-core" @@ -584,6 +657,7 @@ version = "2.17.0" description = "Google API client core library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-api-core-2.17.0.tar.gz", hash = "sha256:de7ef0450faec7c75e0aea313f29ac870fdc44cfaec9d6499a9a17305980ef66"}, {file = "google_api_core-2.17.0-py3-none-any.whl", hash = "sha256:08ed79ed8e93e329de5e3e7452746b734e6bf8438d8d64dd3319d21d3164890c"}, @@ -593,18 +667,18 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] @@ -614,6 +688,7 @@ version = "2.117.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-api-python-client-2.117.0.tar.gz", hash = "sha256:b38cd1477ee3c341a0d2f7427326499b416f36c44e9b20d1da229df8be0c596e"}, {file = "google_api_python_client-2.117.0-py2.py3-none-any.whl", hash = "sha256:bd6d393d0eaa7ea1fa13aefb44be787d1ebdc068ab8255f1c3f1d8b486f46afd"}, @@ -632,6 +707,7 @@ version = "2.27.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, @@ -655,6 +731,7 @@ version = "0.2.0" description = "Google Authentication 
Library: httplib2 transport" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, @@ -670,6 +747,7 @@ version = "1.2.0" description = "Google Authentication Library" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"}, {file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"}, @@ -688,6 +766,7 @@ version = "3.17.2" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-bigquery-3.17.2.tar.gz", hash = "sha256:6e1cf669a40e567ab3289c7b5f2056363da9fcb85d9a4736ee90240d4a7d84ea"}, {file = "google_cloud_bigquery-3.17.2-py2.py3-none-any.whl", hash = "sha256:cdadf5283dca55a1a350bacf8c8a7466169d3cf46c5a0a3abc5e9aa0b0a51dee"}, @@ -702,14 +781,14 @@ python-dateutil = ">=2.7.2,<3.0dev" requests = ">=2.21.0,<3.0.0dev" [package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"] geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas 
(>=1.1.0)", "pyarrow (>=3.0.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] @@ -718,6 +797,7 @@ version = "2.24.0" description = "Google Cloud Bigquery Storage API client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-bigquery-storage-2.24.0.tar.gz", hash = "sha256:b4af5b9aacd8396b8407d1b877601a376d8eea6d192823a8a7881bd2fdc076ce"}, {file = "google_cloud_bigquery_storage-2.24.0-py2.py3-none-any.whl", hash = "sha256:7981eb2758cba56603058d11bb1eeeebf2e1c18097a7118a894510a16e02be52"}, @@ -726,14 +806,14 @@ files = [ [package.dependencies] google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" [package.extras] fastavro = ["fastavro (>=0.21.2)"] -pandas = ["importlib-metadata (>=1.0.0)", "pandas (>=0.21.1)"] +pandas = ["importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=0.21.1)"] pyarrow = ["pyarrow (>=0.15.0)"] [[package]] @@ -742,6 +822,7 @@ version = "2.4.1" description = "Google Cloud API client core library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, @@ -760,6 +841,7 @@ version = "2.14.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, @@ -782,6 +864,7 @@ version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, @@ -862,6 +945,7 @@ version = "2.7.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" +groups = ["main"] files = [ {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, @@ -880,6 +964,7 @@ version = "1.62.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "googleapis-common-protos-1.62.0.tar.gz", hash = 
"sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, @@ -897,6 +982,7 @@ version = "3.2.1" description = "GraphQL Framework for Python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "graphene-3.2.1-py2.py3-none-any.whl", hash = "sha256:2ef689f514ba9e65e88961798cf4c637ca580e541168f9aee2ffbe21fd46f388"}, {file = "graphene-3.2.1.tar.gz", hash = "sha256:722243a9da2caeab703b1af9ec0deec602589c97035f86c486106a52d0c67082"}, @@ -917,6 +1003,7 @@ version = "3.0.0" description = "Graphene Django integration" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "graphene-django-3.0.0.tar.gz", hash = "sha256:018a8dc4736d99b5bb4a15d7fd0b46c98010e9201cb52a290f6d1f16ae6fefda"}, {file = "graphene_django-3.0.0-py2.py3-none-any.whl", hash = "sha256:9fa531d319d5c8f9e08274628f547574ee684e74dddd1c969abf38142bc32df2"}, @@ -941,6 +1028,7 @@ version = "1.3.0" description = "Lib for adding file upload functionality to GraphQL mutations in Graphene Django and Flask-Graphql" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "graphene_file_upload-1.3.0-py3-none-any.whl", hash = "sha256:5afe50f409f50e3d198fd92c883d98d868e6c6aaadf5df3a3f4d88ecad90ed97"}, {file = "graphene_file_upload-1.3.0.tar.gz", hash = "sha256:6898480b0556826472c80971032917c01968ade5800d84054008fe598795b063"}, @@ -961,6 +1049,7 @@ version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, @@ -972,6 +1061,7 @@ version = "3.2.0" description = "Relay library for graphql-core" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c"}, {file = "graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5"}, @@ -986,6 +1076,7 @@ version = "1.60.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, @@ -1052,6 +1143,7 @@ version = "1.60.1" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "grpcio-status-1.60.1.tar.gz", hash = "sha256:61b5aab8989498e8aa142c20b88829ea5d90d18c18c853b9f9e6d407d37bf8b4"}, {file = "grpcio_status-1.60.1-py3-none-any.whl", hash = "sha256:3034fdb239185b6e0f3169d08c268c4507481e4b8a434c21311a03d9eb5889a0"}, @@ -1068,6 +1160,7 @@ version = "20.1.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = 
"sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, @@ -1088,6 +1181,7 @@ version = "0.22.0" description = "A comprehensive HTTP client library." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, @@ -1102,6 +1196,7 @@ version = "2.5.0" description = "huey, a little task queue" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "huey-2.5.0.tar.gz", hash = "sha256:2ffb52fb5c46a1b0d53c79d59df3622312b27e2ab68d81a580985a8ea4ca3480"}, ] @@ -1116,6 +1211,7 @@ version = "2.5.34" description = "File identification library for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, @@ -1130,6 +1226,7 @@ version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, @@ -1141,6 +1238,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1152,6 +1250,7 @@ version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, @@ -1162,7 +1261,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] +dev = ["Sphinx (==7.2.5) ; python_version >= \"3.9\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.2.2) ; 
python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "mypy (==v1.5.1) ; python_version >= \"3.8\"", "pre-commit (==3.4.0) ; python_version >= \"3.8\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==7.4.0) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==4.1.0) ; python_version >= \"3.8\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.0.0) ; python_version >= \"3.8\"", "sphinx-autobuild (==2021.3.14) ; python_version >= \"3.9\"", "sphinx-rtd-theme (==1.3.0) ; python_version >= \"3.9\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.11.0) ; python_version >= \"3.8\""] [[package]] name = "maison" @@ -1170,6 +1269,7 @@ version = "1.4.3" description = "Read settings from config files" optional = false python-versions = ">=3.7.1,<4.0.0" +groups = ["dev"] files = [ {file = "maison-1.4.3-py3-none-any.whl", hash = "sha256:a36208d0befb3bd8aa3b002ac198ce6f6e61efe568b195132640f4032eff46ac"}, {file = "maison-1.4.3.tar.gz", hash = "sha256:766222ce82ae27138256c4af9d0bc6b3226288349601e095dcc567884cf0ce36"}, @@ -1186,6 +1286,7 @@ version = "1.8.0" description = "Node.js virtual environment builder" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +groups = ["dev"] files = [ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, @@ -1200,6 +1301,7 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1245,6 +1347,7 @@ version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -1261,6 +1364,7 @@ version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, @@ -1272,6 +1376,7 @@ version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, @@ -1306,9 
+1411,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1344,6 +1449,7 @@ version = "0.19.2" description = "Google BigQuery connector for pandas" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pandas-gbq-0.19.2.tar.gz", hash = "sha256:b0f7fa84a2be0fe767e33a008ca7e4ad9a9e3ac67255fd0a41fc19b503138447"}, {file = "pandas_gbq-0.19.2-py2.py3-none-any.whl", hash = "sha256:0ef8da3e4088053a2bea069ed688992a44b52af67dadb97eee494b32a2147563"}, @@ -1371,6 +1477,7 @@ version = "9.5.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, @@ -1450,6 +1557,7 @@ version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, @@ -1465,6 +1573,7 @@ version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, @@ -1480,6 +1589,7 @@ version = "3.6.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pre_commit-3.6.1-py2.py3-none-any.whl", hash = "sha256:9fe989afcf095d2c4796ce7c553cf28d4d4a9b9346de3cda079bcf40748454a4"}, {file = "pre_commit-3.6.1.tar.gz", hash = "sha256:c90961d8aa706f75d60935aba09469a6b0bcb8345f127c3fbee4bdc5f114cf4b"}, @@ -1498,6 +1608,7 @@ version = "2.3" description = "Promises/A+ implementation for Python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, ] @@ -1514,6 +1625,7 @@ version = "1.23.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, @@ -1531,6 +1643,7 @@ version = "4.25.2" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, @@ -1551,6 +1664,7 @@ version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, @@ -1632,6 +1746,7 @@ version = "15.0.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, @@ -1680,6 +1795,7 @@ version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, @@ -1691,6 +1807,7 @@ version = "0.3.0" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, @@ -1705,6 +1822,7 @@ version = "2.6.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, @@ -1724,6 +1842,7 @@ version = "2.16.2" description = "" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, @@ -1815,6 +1934,7 @@ version = "1.8.2" description = "PyData helpers for authenticating to Google APIs" optional = 
false python-versions = "*" +groups = ["main"] files = [ {file = "pydata-google-auth-1.8.2.tar.gz", hash = "sha256:547b6c0fbea657dcecd50887c5db8640ebec062a59a2b88e8ff8e53a04818303"}, {file = "pydata_google_auth-1.8.2-py2.py3-none-any.whl", hash = "sha256:a9dce59af4a170ea60c4b2ebbc83ee1f74d34255a4f97b2469ae9a4a0dc98e99"}, @@ -1831,6 +1951,7 @@ version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, @@ -1848,6 +1969,7 @@ version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" +groups = ["main"] files = [ {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, @@ -1862,6 +1984,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -1884,6 +2007,7 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -1902,6 +2026,7 @@ version = "4.8.0" description = "A Django plugin for pytest." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-django-4.8.0.tar.gz", hash = "sha256:5d054fe011c56f3b10f978f41a8efb2e5adfc7e680ef36fb571ada1f24779d90"}, {file = "pytest_django-4.8.0-py3-none-any.whl", hash = "sha256:ca1ddd1e0e4c227cf9e3e40a6afc6d106b3e70868fd2ac5798a22501271cd0c7"}, @@ -1920,6 +2045,7 @@ version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1934,6 +2060,7 @@ version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, @@ -1945,6 +2072,7 @@ version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, @@ -2005,6 +2133,7 @@ version = "5.0.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, @@ -2023,6 +2152,7 @@ version = "2.31.0" description = "Python HTTP for Humans." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, @@ -2044,6 +2174,7 @@ version = "1.3.1" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, @@ -2062,6 +2193,7 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -2076,6 +2208,7 @@ version = "0.2.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"}, {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"}, @@ -2102,6 +2235,7 @@ version = "0.91.0" description = "ruyaml is a fork of ruamel.yaml" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "ruyaml-0.91.0-py3-none-any.whl", hash = "sha256:50e0ee3389c77ad340e209472e0effd41ae0275246df00cdad0a067532171755"}, {file = "ruyaml-0.91.0.tar.gz", hash = "sha256:6ce9de9f4d082d696d3bde264664d1bcdca8f5a9dff9d1a1f1a127969ab871ab"}, @@ -2120,6 +2254,7 @@ version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, @@ -2127,7 +2262,7 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov ; platform_python_implementation != \"PyPy\"", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff ; sys_platform != \"cygwin\"", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2136,6 +2271,7 @@ version = "0.20.0" description = "sqlfmt formats your dbt SQL files so you don't have to." 
optional = false python-versions = ">=3.8,<4.0" +groups = ["dev"] files = [ {file = "shandy_sqlfmt-0.20.0-py3-none-any.whl", hash = "sha256:0a8fd640e7d5fdb60b97faef9485e7389b94406f36501f7dc84c86577283f282"}, {file = "shandy_sqlfmt-0.20.0.tar.gz", hash = "sha256:2c6a8a39b03b1dac761239a08e66fbde849eed739528c2e80aeebf5164b45f6b"}, @@ -2157,6 +2293,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2168,6 +2305,7 @@ version = "0.4.4" description = "A non-validating SQL parser." optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, @@ -2184,6 +2322,7 @@ version = "4.2.0" description = "Python bindings for the Stripe API" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "stripe-4.2.0-py2.py3-none-any.whl", hash = "sha256:8ce03bfc099465740e33890000c454e79316c8730e45ad1efbaec3d52a019d05"}, {file = "stripe-4.2.0.tar.gz", hash = "sha256:f0134704bd4e9410fae25034836dc6f5849d92c0f9083d58d43e01b3e631ac4c"}, @@ -2198,6 +2337,7 @@ version = "1.3" description = "The most basic Text::Unidecode port" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, @@ -2209,6 +2349,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2220,6 +2361,8 @@ version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" +groups = ["dev", "test"] +markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -2231,6 +2374,7 @@ version = "4.66.4" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, @@ -2251,6 +2395,7 @@ version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = 
"sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, @@ -2262,6 +2407,7 @@ version = "2023.4" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] files = [ {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, @@ -2273,6 +2419,7 @@ version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, @@ -2284,14 +2431,15 @@ version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["main"] files = [ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -2300,6 +2448,7 @@ version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, @@ -2312,7 +2461,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer 
(>=0.4.8) ; platform_python_implementation == \"PyPy\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "win32-setctime" @@ -2320,13 +2469,15 @@ version = "1.1.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, ] [package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [[package]] name = "yamlfix" @@ -2334,6 +2485,7 @@ version = "1.16.0" description = "A simple opionated yaml formatter that keeps your comments!" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "yamlfix-1.16.0-py3-none-any.whl", hash = "sha256:d92bf8a6d5b6f186bd9d643d633549a1c2424555cb8d176a5d38bce3e678b2b0"}, {file = "yamlfix-1.16.0.tar.gz", hash = "sha256:72f7990e5b2b4459ef3249df4724dacbd85ce7b87f4ea3503d8a72c48574cc32"}, @@ -2345,6 +2497,6 @@ maison = ">=1.4.0" ruyaml = ">=0.91.0" [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "5973fbf28a9bc43c2038ff04f01624241ddb0b56a704b6d0ce8426370c308fc5" +content-hash = "dd9e4d7250d6758ec617051e48a482c3cc96946537cc5bd24b4dd72f90a179e1" diff --git a/pyproject.toml b/pyproject.toml index b3622a2b..a2b8cedf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,8 @@ dj-stripe = "^2.8.3" pydantic = "^2.5.3" requests = "^2.31.0" tqdm = "^4.66.4" +djangorestframework = "^3.16.0" +djangorestframework-simplejwt = "^5.5.0" [tool.poetry.group.dev.dependencies] pre-commit = "^3.3.3" From 2b9fbc5f5744d0284c6ba538f2b33e3d379c1521 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 15 Apr 2025 17:10:31 -0300 Subject: [PATCH 023/181] comment could be empty/null --- backend/apps/chatbot/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index 7aaa5866..7fc42c95 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -25,6 +25,6 @@ class Feedback(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) message_pair = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=False) rating = models.SmallIntegerField(choices=[(0, "Bad"), (1, "Good")]) - comment = models.TextField(blank=True) + comment = models.TextField(null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) From f25da98f71c01997074a08bd1ff41e33218b5c79 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 15 Apr 2025 17:11:03 -0300 Subject: [PATCH 024/181] added jwt tokens urls --- backend/apps/chatbot/urls.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index 244c8d73..84b700ed 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -1,18 +1,17 @@ # -*- coding: utf-8 -*- from django.urls import path +from rest_framework_simplejwt.views import (TokenObtainPairView, + 
TokenRefreshView) -from .views import ( - ThreadListView, - ThreadDetailView, - MessageView, - FeedbackView, - CheckpointView -) +from .views import (CheckpointView, FeedbackView, MessageView, + ThreadDetailView, ThreadListView) urlpatterns = [ + path('chatbot/token/', TokenObtainPairView.as_view()), + path('chatbot/token/refresh/', TokenRefreshView.as_view()), path("chatbot/threads/", ThreadListView.as_view()), path("chatbot/threads//", ThreadDetailView.as_view()), - path("chatbot/threads//message", MessageView.as_view()), - path("chatbot/message-pairs//feedback", FeedbackView.as_view()), + path("chatbot/threads//message/", MessageView.as_view()), + path("chatbot/message-pairs//feedback/", FeedbackView.as_view()), path("chatbot/checkpoints//", CheckpointView.as_view()) ] From f680da5ba56b840616de06838d09e3edbd80bb85 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 15 Apr 2025 17:11:57 -0300 Subject: [PATCH 025/181] changed `chatbot` package installation path --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index efe4327c..ee659365 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,7 @@ COPY nginx.conf /etc/nginx/nginx.conf WORKDIR /app COPY . . RUN /env/bin/pip install --no-cache-dir . && rm nginx.conf -RUN /env/bin/pip install --no-cache-dir ./chatbot/chatbot +RUN /env/bin/pip install --no-cache-dir ./chatbot # Prevents Python from writing .pyc files to disc # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE From 86048fe155be0bd819e134a114fdf4465978ec66 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 15 Apr 2025 17:13:19 -0300 Subject: [PATCH 026/181] created serializers and refactored views, added jwt auth, error handling, etc. --- backend/apps/chatbot/serializers.py | 29 ++++++ backend/apps/chatbot/views.py | 144 +++++++++++++++------------- 2 files changed, 105 insertions(+), 68 deletions(-) create mode 100644 backend/apps/chatbot/serializers.py diff --git a/backend/apps/chatbot/serializers.py b/backend/apps/chatbot/serializers.py new file mode 100644 index 00000000..0ca78e1f --- /dev/null +++ b/backend/apps/chatbot/serializers.py @@ -0,0 +1,29 @@ +import uuid + +from rest_framework import serializers + +from .models import Feedback, MessagePair, Thread + + +class FeedbackCreateSerializer(serializers.Serializer): + rating = serializers.IntegerField() + comment = serializers.CharField(allow_null=True) + +class FeedbackSerializer(serializers.ModelSerializer): + class Meta: + model = Feedback + fields = [field.name for field in Feedback._meta.fields] + +class MessagePairSerializer(serializers.ModelSerializer): + class Meta: + model = MessagePair + fields = [field.name for field in MessagePair._meta.fields] + +class ThreadSerializer(serializers.ModelSerializer): + class Meta: + model = Thread + fields = [field.name for field in Thread._meta.fields] + +class UserMessageSerializer(serializers.Serializer): + id = serializers.UUIDField(default=uuid.uuid4) + content = serializers.CharField() diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 008fc60b..9aef185d 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -1,65 +1,73 @@ # -*- coding: utf-8 -*- -import json +from django.http import HttpResponse, JsonResponse +from rest_framework import exceptions +from rest_framework.parsers import JSONParser +from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request +from rest_framework.views import 
APIView -from django.http import HttpRequest, HttpResponse, JsonResponse -from django.views import View +from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage +from chatbot.databases import BigQueryDatabase + +from .models import * +from .serializers import * # TODO: add authentication (using this login_required decorator + checking user id) # TODO: add error handling (404 wrong thread if, etc...) # TODO: To test this, create a test user in a migration -from django.contrib.auth.decorators import login_required -from django.utils.decorators import method_decorator -from django.views.decorators.csrf import csrf_exempt - -from backend.apps.chatbot.models import Feedback, MessagePair, Thread -from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage -from chatbot.databases import BigQueryDatabase - - database = BigQueryDatabase() assistant = SQLAssistant(database=database) +def get_thread_by_id(thread_id: str) -> Thread: + try: + return Thread.objects.get(id=thread_id) + except Thread.DoesNotExist: + raise exceptions.NotFound -class ThreadListView(View): - def get(self, request: HttpRequest, *args, **kwargs): - threads = Thread.objects.filter(account=request.user) - return JsonResponse({"threads": [thread.to_dict() for thread in threads]}) +def get_message_pair_by_id(message_pair_id: str) -> MessagePair: + try: + return MessagePair.objects.get(id=message_pair_id) + except MessagePair.DoesNotExist: + raise exceptions.NotFound - def post(self, request: HttpRequest, *args, **kwargs): - thread = Thread.objects.create(account=request.user) - return JsonResponse(thread.to_dict()) +class ThreadListView(APIView): + permission_classes = [IsAuthenticated] -class ThreadDetailView(View): - def get(self, request: HttpRequest, thread_id: str, *args, **kwargs): - try: - thread = Thread.objects.get(id=thread_id) - except Thread.DoesNotExist: - return HttpResponse(404) + def get(self, request: Request): + threads = Thread.objects.filter(account=request.user.id) + serializer = ThreadSerializer(threads, many=True) + return JsonResponse(serializer.data, safe=False) - if thread.account.uuid != request.user.id: - return JsonResponse( - data={"error": "You are not authorized to access this thread"}, - status=403 - ) + def post(self, request: Request): + thread = Thread.objects.create(account=request.user) + serializer = ThreadSerializer(thread) + return JsonResponse(serializer.data) + +class ThreadDetailView(APIView): + permission_classes = [IsAuthenticated] + def get(self, request: Request, thread_id: str): + thread = get_thread_by_id(thread_id) messages = MessagePair.objects.filter(thread=thread) + serializer = MessagePairSerializer(messages, many=True) + return JsonResponse(serializer.data, safe=False) + +class MessageView(APIView): + permission_classes = [IsAuthenticated] - return JsonResponse({"messages": [message.to_dict() for message in messages]}) + def post(self, request: Request, thread_id: str): + data = JSONParser().parse(request) -class MessageView(View): - def post(self, request: HttpRequest, thread_id: str, *args, **kwargs): - thread = Thread.objects.get(id=thread_id) + serializer = UserMessageSerializer(data=data) - if thread.account.uuid != request.user.id: - return JsonResponse( - data={"error": "You are not authorized to access this thread"}, - status=403 - ) + if not serializer.is_valid(): + return JsonResponse(serializer.errors, status=400) - user_message = json.loads(request.body.decode("utf-8")) - user_message = UserMessage(**user_message) + 
user_message = UserMessage(**serializer.data) + + thread = get_thread_by_id(thread_id) assistant_response: SQLAssistantMessage = assistant.invoke( message=user_message, @@ -69,45 +77,45 @@ def post(self, request: HttpRequest, thread_id: str, *args, **kwargs): # TODO (nice to have): stream results message_pair = MessagePair.objects.create( id=assistant_response.id, - thread=thread_id, + thread=thread, model_uri=assistant_response.model_uri, user_message=user_message.content, assistant_message=assistant_response.content, generated_queries=assistant_response.sql_queries, ) - return JsonResponse(message_pair) + serializer = MessagePairSerializer(message_pair) -class FeedbackView(View): - def put(self, request: HttpRequest, message_pair_id: str, *args, **kwargs): - message_pair = MessagePair.objects.get(id=message_pair_id) + return JsonResponse(serializer.data, status=201) - if message_pair.thread.account.uuid != request.user.id: - return JsonResponse( - data={"error": "You are not authorized to access this thread"}, - status=403 - ) +class FeedbackView(APIView): + permission_classes = [IsAuthenticated] - feedback: dict = json.loads(request.body.decode("utf-8")) + def put(self, request: Request, message_pair_id: str): + data = JSONParser().parse(request) - feedback = Feedback.objects.update_or_create( - message_pair=message_pair_id, - rating=feedback["rating"], - comment=feedback["comment"], - ) + serializer = FeedbackCreateSerializer(data=data) - return JsonResponse(feedback) + if not serializer.is_valid(): + return JsonResponse(serializer.errors, status=400) -class CheckpointView(View): - def delete(self, request: HttpRequest, thread_id: str, *args, **kwargs): - thread = Thread.objects.get(id=thread_id) + message_pair = get_message_pair_by_id(message_pair_id) - if thread.account.uuid != request.user.id: - return JsonResponse( - data={"error": "You are not authorized to access this thread"}, - status=403 - ) + feedback, created = Feedback.objects.update_or_create( + message_pair=message_pair, + defaults=serializer.data + ) + + serializer = FeedbackSerializer(feedback) - assistant.clear_thread(thread_id) + status = 201 if created else 200 - return HttpResponse(200) + return JsonResponse(serializer.data, status=status) + +class CheckpointView(APIView): + def delete(self, request: Request, thread_id: str): + try: + assistant.clear_thread(thread_id) + return HttpResponse("Checkpoint cleared successfully", status=200) + except Exception: + return HttpResponse("Error clearing checkpoint", status=500) From 11a863026a374894c3e0e047e3da8257da005116 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 22 Apr 2025 17:20:10 -0300 Subject: [PATCH 027/181] reminder for custom authentication rules --- backend/settings/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/settings/base.py b/backend/settings/base.py index b88d6b0e..08bfdda7 100644 --- a/backend/settings/base.py +++ b/backend/settings/base.py @@ -79,8 +79,9 @@ } SIMPLE_JWT = { - 'ACCESS_TOKEN_LIFETIME': timedelta(days=1), - 'REFRESH_TOKEN_LIFETIME': timedelta(days=7), + "ACCESS_TOKEN_LIFETIME": timedelta(days=1), + "REFRESH_TOKEN_LIFETIME": timedelta(days=7), + # "USER_AUTHENTICATION_RULE": "backend.apps.chatbot.authentication.user_authentication_rule", <- for custom authentication rules } MIDDLEWARE = [ From a898fd48b2a8871a9a5891ec4c607dab2ba5e0c6 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 22 Apr 2025 17:26:27 -0300 Subject: [PATCH 028/181] updated dockerfile and compose file for chromadb --- 
.env.docker | 5 +++++ Dockerfile | 2 +- docker-compose.override.yaml | 11 ++++++++++- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/.env.docker b/.env.docker index 57c98f36..e9c922be 100644 --- a/.env.docker +++ b/.env.docker @@ -28,3 +28,8 @@ MODEL_URI= OPENAI_API_KEY= LANGCHAIN_TRACING_V2= LANGCHAIN_API_KEY= +DB_URL= +CHROMA_HOST= +CHROMA_PORT= +SQL_CHROMA_COLLECTION= +VIZ_CHROMA_COLLECTION= diff --git a/Dockerfile b/Dockerfile index ee659365..2c78a4ec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ ENV PATH /env/bin:$PATH # Install make, nginx and copy configuration RUN apt-get update \ - && apt-get install -y --no-install-recommends curl libpq-dev make nginx \ + && apt-get install -y --no-install-recommends build-essential curl g++ libpq-dev make nginx \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && rm /etc/nginx/sites-enabled/default diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 98b7d21f..b063039f 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -70,6 +70,15 @@ services: retries: 5 start_period: 30s restart: unless-stopped + chromadb: + image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 + ports: + - "8001:8000" + volumes: + - chroma_data:/data + restart: unless-stopped + volumes: esdata: - pgdata: \ No newline at end of file + pgdata: + chroma_data: From 7e63cc0457cda72ffc669742a6f061cc8861e9b4 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 22 Apr 2025 17:29:20 -0300 Subject: [PATCH 029/181] fixing a TypeError (the assistant expects `thread_id` as a string) and using the `get_sync_sql_assistant` helper method --- backend/apps/chatbot/views.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 9aef185d..b6e30d09 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import uuid + from django.http import HttpResponse, JsonResponse from rest_framework import exceptions from rest_framework.parsers import JSONParser @@ -6,27 +8,23 @@ from rest_framework.request import Request from rest_framework.views import APIView -from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage +from chatbot.assistants import (SQLAssistantMessage, UserMessage, + get_sync_sql_assistant) from chatbot.databases import BigQueryDatabase from .models import * from .serializers import * -# TODO: add authentication (using this login_required decorator + checking user id) -# TODO: add error handling (404 wrong thread if, etc...) 
-# TODO: To test this, create a test user in a migration - database = BigQueryDatabase() +assistant, pool = get_sync_sql_assistant(database) -assistant = SQLAssistant(database=database) - -def get_thread_by_id(thread_id: str) -> Thread: +def get_thread_by_id(thread_id: uuid.UUID) -> Thread: try: return Thread.objects.get(id=thread_id) except Thread.DoesNotExist: raise exceptions.NotFound -def get_message_pair_by_id(message_pair_id: str) -> MessagePair: +def get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: try: return MessagePair.objects.get(id=message_pair_id) except MessagePair.DoesNotExist: @@ -48,7 +46,7 @@ def post(self, request: Request): class ThreadDetailView(APIView): permission_classes = [IsAuthenticated] - def get(self, request: Request, thread_id: str): + def get(self, request: Request, thread_id: uuid.UUID): thread = get_thread_by_id(thread_id) messages = MessagePair.objects.filter(thread=thread) serializer = MessagePairSerializer(messages, many=True) @@ -57,7 +55,9 @@ def get(self, request: Request, thread_id: str): class MessageView(APIView): permission_classes = [IsAuthenticated] - def post(self, request: Request, thread_id: str): + def post(self, request: Request, thread_id: uuid.UUID): + thread_id = str(thread_id) + data = JSONParser().parse(request) serializer = UserMessageSerializer(data=data) @@ -91,7 +91,7 @@ def post(self, request: Request, thread_id: str): class FeedbackView(APIView): permission_classes = [IsAuthenticated] - def put(self, request: Request, message_pair_id: str): + def put(self, request: Request, message_pair_id: uuid.UUID): data = JSONParser().parse(request) serializer = FeedbackCreateSerializer(data=data) @@ -113,8 +113,9 @@ def put(self, request: Request, message_pair_id: str): return JsonResponse(serializer.data, status=status) class CheckpointView(APIView): - def delete(self, request: Request, thread_id: str): + def delete(self, request: Request, thread_id: uuid.UUID): try: + thread_id = str(thread_id) assistant.clear_thread(thread_id) return HttpResponse("Checkpoint cleared successfully", status=200) except Exception: From 3e5426f2d61a14c6a628c0d1eaebde2c12bbebf0 Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Wed, 23 Apr 2025 23:43:57 -0300 Subject: [PATCH 030/181] Adding chatbot as submodule --- .gitmodules | 3 +++ chatbot | 1 + 2 files changed, 4 insertions(+) create mode 100644 .gitmodules create mode 160000 chatbot diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..8d0fe23f --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "chatbot"] + path = chatbot + url = git@github.com:basedosdados/chatbot.git diff --git a/chatbot b/chatbot new file mode 160000 index 00000000..67526ccb --- /dev/null +++ b/chatbot @@ -0,0 +1 @@ +Subproject commit 67526ccb4e0dc064f11f5cbfdc7dc42e837369e5 From 5a77ec0b553529241f4fba6ad24b85501f6a4968 Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Wed, 23 Apr 2025 23:50:12 -0300 Subject: [PATCH 031/181] Download submodules in ci. 
Also update README to alert on cloning submodules --- .github/workflows/deploy-dev.yaml | 1 + .github/workflows/deploy-prod.yaml | 1 + .github/workflows/deploy-staging.yaml | 1 + .github/workflows/release-chart.yaml | 2 ++ .github/workflows/release-dev.yaml | 1 + .github/workflows/release-prod.yaml | 1 + .github/workflows/release-staging.yaml | 1 + Dockerfile | 1 + README.md | 5 +++++ 9 files changed, 14 insertions(+) diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index a921f7cb..06f3b6e4 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -19,6 +19,7 @@ jobs: uses: actions/checkout@v4 with: ref: dev + submodules: recursive - name: Import secrets from Vault id: import_secrets uses: hashicorp/vault-action@v2.8.0 diff --git a/.github/workflows/deploy-prod.yaml b/.github/workflows/deploy-prod.yaml index abba4734..ca5c9c19 100644 --- a/.github/workflows/deploy-prod.yaml +++ b/.github/workflows/deploy-prod.yaml @@ -19,6 +19,7 @@ jobs: uses: actions/checkout@v4 with: ref: main + submodules: recursive - name: Import secrets from Vault id: import_secrets uses: hashicorp/vault-action@v2.8.0 diff --git a/.github/workflows/deploy-staging.yaml b/.github/workflows/deploy-staging.yaml index bdaf4391..5a4e1cbd 100644 --- a/.github/workflows/deploy-staging.yaml +++ b/.github/workflows/deploy-staging.yaml @@ -19,6 +19,7 @@ jobs: uses: actions/checkout@v4 with: ref: staging + submodules: recursive - name: Import secrets from Vault id: import_secrets uses: hashicorp/vault-action@v2.8.0 diff --git a/.github/workflows/release-chart.yaml b/.github/workflows/release-chart.yaml index 000626c5..3cbf6a57 100644 --- a/.github/workflows/release-chart.yaml +++ b/.github/workflows/release-chart.yaml @@ -10,6 +10,8 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 + with: + submodules: recursive - name: Configure Git run: | git config user.name "$GITHUB_ACTOR" diff --git a/.github/workflows/release-dev.yaml b/.github/workflows/release-dev.yaml index 4be5fcf6..5aec0861 100644 --- a/.github/workflows/release-dev.yaml +++ b/.github/workflows/release-dev.yaml @@ -13,6 +13,7 @@ jobs: uses: actions/checkout@v4 with: ref: dev + submodules: recursive - name: Login to GitHub Container Registry uses: docker/login-action@v2 with: diff --git a/.github/workflows/release-prod.yaml b/.github/workflows/release-prod.yaml index 2e856aba..d6e90e08 100644 --- a/.github/workflows/release-prod.yaml +++ b/.github/workflows/release-prod.yaml @@ -13,6 +13,7 @@ jobs: uses: actions/checkout@v4 with: ref: main + submodules: recursive - name: Login to GitHub Container Registry uses: docker/login-action@v2 with: diff --git a/.github/workflows/release-staging.yaml b/.github/workflows/release-staging.yaml index 15bd6c92..a6a23f63 100644 --- a/.github/workflows/release-staging.yaml +++ b/.github/workflows/release-staging.yaml @@ -13,6 +13,7 @@ jobs: uses: actions/checkout@v4 with: ref: staging + submodules: recursive - name: Login to GitHub Container Registry uses: docker/login-action@v2 with: diff --git a/Dockerfile b/Dockerfile index 2c78a4ec..378053ed 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,6 +18,7 @@ COPY nginx.conf /etc/nginx/nginx.conf WORKDIR /app COPY . . RUN /env/bin/pip install --no-cache-dir . && rm nginx.conf +RUN test -d ./chatbot || (echo "ERROR: Git submodule 'chatbot' not found. Please run 'git submodule update --init --recursive'. See backend/README.md for more information." 
&& exit 1) RUN /env/bin/pip install --no-cache-dir ./chatbot # Prevents Python from writing .pyc files to disc diff --git a/README.md b/README.md index ad4a63e5..72820b6d 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,11 @@ git clone https://github.com/basedosdados/backend.git ``` +- Baixar submodulos + ``` + git submodule update --recursive --init + ``` + - Abrí-lo no seu editor de texto - No seu ambiente de desenvolvimento, instalar [poetry](https://python-poetry.org/) para gerenciamento de dependências From 16de6002211d5d1e127dbf794f442d80bbad1384 Mon Sep 17 00:00:00 2001 From: Fred Israel Date: Thu, 24 Apr 2025 00:12:50 -0300 Subject: [PATCH 032/181] minor stuff --- backend/apps/chatbot/urls.py | 4 ++-- backend/apps/chatbot/views.py | 32 ++++++++++++++++---------------- chatbot | 2 +- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index 84b700ed..0140b72f 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -3,7 +3,7 @@ from rest_framework_simplejwt.views import (TokenObtainPairView, TokenRefreshView) -from .views import (CheckpointView, FeedbackView, MessageView, +from .views import (CheckpointView, FeedbackView, MessageListView, ThreadDetailView, ThreadListView) urlpatterns = [ @@ -11,7 +11,7 @@ path('chatbot/token/refresh/', TokenRefreshView.as_view()), path("chatbot/threads/", ThreadListView.as_view()), path("chatbot/threads//", ThreadDetailView.as_view()), - path("chatbot/threads//message/", MessageView.as_view()), + path("chatbot/threads//messages/", MessageListView.as_view()), path("chatbot/message-pairs//feedback/", FeedbackView.as_view()), path("chatbot/checkpoints//", CheckpointView.as_view()) ] diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index b6e30d09..7e4b2484 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -18,18 +18,6 @@ database = BigQueryDatabase() assistant, pool = get_sync_sql_assistant(database) -def get_thread_by_id(thread_id: uuid.UUID) -> Thread: - try: - return Thread.objects.get(id=thread_id) - except Thread.DoesNotExist: - raise exceptions.NotFound - -def get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: - try: - return MessagePair.objects.get(id=message_pair_id) - except MessagePair.DoesNotExist: - raise exceptions.NotFound - class ThreadListView(APIView): permission_classes = [IsAuthenticated] @@ -47,12 +35,12 @@ class ThreadDetailView(APIView): permission_classes = [IsAuthenticated] def get(self, request: Request, thread_id: uuid.UUID): - thread = get_thread_by_id(thread_id) + thread = _get_thread_by_id(thread_id) messages = MessagePair.objects.filter(thread=thread) serializer = MessagePairSerializer(messages, many=True) return JsonResponse(serializer.data, safe=False) -class MessageView(APIView): +class MessageListView(APIView): permission_classes = [IsAuthenticated] def post(self, request: Request, thread_id: uuid.UUID): @@ -67,7 +55,7 @@ def post(self, request: Request, thread_id: uuid.UUID): user_message = UserMessage(**serializer.data) - thread = get_thread_by_id(thread_id) + thread = _get_thread_by_id(thread_id) assistant_response: SQLAssistantMessage = assistant.invoke( message=user_message, @@ -99,7 +87,7 @@ def put(self, request: Request, message_pair_id: uuid.UUID): if not serializer.is_valid(): return JsonResponse(serializer.errors, status=400) - message_pair = get_message_pair_by_id(message_pair_id) + message_pair = _get_message_pair_by_id(message_pair_id) 
feedback, created = Feedback.objects.update_or_create( message_pair=message_pair, @@ -120,3 +108,15 @@ def delete(self, request: Request, thread_id: uuid.UUID): return HttpResponse("Checkpoint cleared successfully", status=200) except Exception: return HttpResponse("Error clearing checkpoint", status=500) + +def _get_thread_by_id(thread_id: uuid.UUID) -> Thread: + try: + return Thread.objects.get(id=thread_id) + except Thread.DoesNotExist: + raise exceptions.NotFound + +def _get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: + try: + return MessagePair.objects.get(id=message_pair_id) + except MessagePair.DoesNotExist: + raise exceptions.NotFound diff --git a/chatbot b/chatbot index 67526ccb..ee00780c 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit 67526ccb4e0dc064f11f5cbfdc7dc42e837369e5 +Subproject commit ee00780c0a47e61a86ba8222a6b6b14eeba2700c From 0c5fd791f789f0cbce3cbf790b9b52f896d9dd65 Mon Sep 17 00:00:00 2001 From: isabelmeister Date: Thu, 24 Apr 2025 10:42:32 -0300 Subject: [PATCH 033/181] fix: adjust lint in files --- backend/apps/account/migrations/0001_initial.py | 4 +--- .../0021_rename_role_career_role_old_and_more.py | 15 +++++++-------- .../0022_rename_role_new_career_role_and_more.py | 15 +++++++-------- ...alter_career_role_old_alter_career_team_old.py | 15 +++++++-------- backend/apps/account/models.py | 4 +--- .../api/v1/management/commands/reorder_tables.py | 4 +--- ..._is_closed_alter_dataset_is_closed_and_more.py | 4 +--- backend/apps/api/v1/search_views.py | 4 +--- 8 files changed, 26 insertions(+), 39 deletions(-) diff --git a/backend/apps/account/migrations/0001_initial.py b/backend/apps/account/migrations/0001_initial.py index 08eb76e2..e032df98 100644 --- a/backend/apps/account/migrations/0001_initial.py +++ b/backend/apps/account/migrations/0001_initial.py @@ -84,9 +84,7 @@ class Migration(migrations.Migration): ), ( "twitter", - models.CharField( - blank=True, max_length=255, null=True, verbose_name="Twitter" - ), + models.CharField(blank=True, max_length=255, null=True, verbose_name="Twitter"), ), ( "linkedin", diff --git a/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py b/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py index d2578ae3..3d091ece 100644 --- a/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py +++ b/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py @@ -4,20 +4,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('account', '0020_career_role_new'), + ("account", "0020_career_role_new"), ] operations = [ migrations.RenameField( - model_name='career', - old_name='role', - new_name='role_old', + model_name="career", + old_name="role", + new_name="role_old", ), migrations.RenameField( - model_name='career', - old_name='team', - new_name='team_old', + model_name="career", + old_name="team", + new_name="team_old", ), ] diff --git a/backend/apps/account/migrations/0022_rename_role_new_career_role_and_more.py b/backend/apps/account/migrations/0022_rename_role_new_career_role_and_more.py index 8bd4cecc..bf9e79aa 100644 --- a/backend/apps/account/migrations/0022_rename_role_new_career_role_and_more.py +++ b/backend/apps/account/migrations/0022_rename_role_new_career_role_and_more.py @@ -4,20 +4,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('account', '0021_rename_role_career_role_old_and_more'), + ("account", "0021_rename_role_career_role_old_and_more"), ] operations = [ 
migrations.RenameField( - model_name='career', - old_name='role_new', - new_name='role', + model_name="career", + old_name="role_new", + new_name="role", ), migrations.RenameField( - model_name='career', - old_name='team_new', - new_name='team', + model_name="career", + old_name="team_new", + new_name="team", ), ] diff --git a/backend/apps/account/migrations/0023_alter_career_role_old_alter_career_team_old.py b/backend/apps/account/migrations/0023_alter_career_role_old_alter_career_team_old.py index 220595d9..72e29e35 100644 --- a/backend/apps/account/migrations/0023_alter_career_role_old_alter_career_team_old.py +++ b/backend/apps/account/migrations/0023_alter_career_role_old_alter_career_team_old.py @@ -4,20 +4,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('account', '0022_rename_role_new_career_role_and_more'), + ("account", "0022_rename_role_new_career_role_and_more"), ] operations = [ migrations.AlterField( - model_name='career', - name='role_old', - field=models.CharField(blank=True, max_length=40, verbose_name='Role (old)'), + model_name="career", + name="role_old", + field=models.CharField(blank=True, max_length=40, verbose_name="Role (old)"), ), migrations.AlterField( - model_name='career', - name='team_old', - field=models.CharField(blank=True, max_length=40, verbose_name='Team (old)'), + model_name="career", + name="team_old", + field=models.CharField(blank=True, max_length=40, verbose_name="Team (old)"), ), ] diff --git a/backend/apps/account/models.py b/backend/apps/account/models.py index 684347d9..a2be8cab 100644 --- a/backend/apps/account/models.py +++ b/backend/apps/account/models.py @@ -211,9 +211,7 @@ class Account(BaseModel, AbstractBaseUser, PermissionsMixin): uuid = models.UUIDField(primary_key=False, default=uuid4) email = models.EmailField("Email", unique=True) - gcp_email = models.EmailField( - "GCP email", null=True, blank=True - ) # Google Cloud Platform email + gcp_email = models.EmailField("GCP email", null=True, blank=True) # Google Cloud Platform email username = models.CharField("Username", max_length=40, blank=True, null=True, unique=True) first_name = models.CharField("Nome", max_length=40, blank=True) diff --git a/backend/apps/api/v1/management/commands/reorder_tables.py b/backend/apps/api/v1/management/commands/reorder_tables.py index 8b5b2eaf..db033da3 100644 --- a/backend/apps/api/v1/management/commands/reorder_tables.py +++ b/backend/apps/api/v1/management/commands/reorder_tables.py @@ -10,9 +10,7 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("dataset_id", type=str, help="ID of the dataset") - parser.add_argument( - "ordered_slugs", type=str, nargs="+", help="Ordered tables JSON string" - ) + parser.add_argument("ordered_slugs", type=str, nargs="+", help="Ordered tables JSON string") def handle(self, dataset_id, *args, **options): ordered_slugs = options["ordered_slugs"] diff --git a/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py b/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py index 7306ad2d..f7b2d234 100644 --- a/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py +++ b/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py @@ -27,8 +27,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="table", name="is_closed", - field=models.BooleanField( - default=False, help_text="Table is for Pro 
subscribers only" - ), + field=models.BooleanField(default=False, help_text="Table is for Pro subscribers only"), ), ] diff --git a/backend/apps/api/v1/search_views.py b/backend/apps/api/v1/search_views.py index c7306cce..c37abd31 100644 --- a/backend/apps/api/v1/search_views.py +++ b/backend/apps/api/v1/search_views.py @@ -296,9 +296,7 @@ def as_search_result(result: SearchResult, locale="pt"): ) entities = [] - for slug, name in zip( - result.entity_slug or [], getattr(result, f"entity_name_{locale}") or [] - ): + for slug, name in zip(result.entity_slug or [], getattr(result, f"entity_name_{locale}") or []): entities.append( { "slug": slug, From 014c9cf1b1d18179838090ef7ce461277d825c83 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 24 Apr 2025 11:49:29 -0300 Subject: [PATCH 034/181] adjusting indentation --- docker-compose.override.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index b063039f..19163ae5 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -51,8 +51,8 @@ services: env_file: [.env.docker] command: ["/app/start-dev.sh"] volumes: - - .:/app - - $HOME/.config/pydata:$HOME/.config/pydata + - .:/app + - $HOME/.config/pydata:$HOME/.config/pydata ports: - "8000:8000" # Porta da api - "5678:5678" # Porta de debug From d49acdaeae3fc33f00719843b0e7732b61cf6377 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 24 Apr 2025 11:50:48 -0300 Subject: [PATCH 035/181] updated chatbot package and using validated data --- backend/apps/chatbot/views.py | 8 ++++---- chatbot | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 7e4b2484..9fc5c1ae 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -8,15 +8,15 @@ from rest_framework.request import Request from rest_framework.views import APIView -from chatbot.assistants import (SQLAssistantMessage, UserMessage, - get_sync_sql_assistant) +from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage from chatbot.databases import BigQueryDatabase from .models import * from .serializers import * database = BigQueryDatabase() -assistant, pool = get_sync_sql_assistant(database) +assistant = SQLAssistant(database) +assistant.setup() class ThreadListView(APIView): permission_classes = [IsAuthenticated] @@ -53,7 +53,7 @@ def post(self, request: Request, thread_id: uuid.UUID): if not serializer.is_valid(): return JsonResponse(serializer.errors, status=400) - user_message = UserMessage(**serializer.data) + user_message = UserMessage(**serializer.validated_data) thread = _get_thread_by_id(thread_id) diff --git a/chatbot b/chatbot index ee00780c..0d3c5f67 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit ee00780c0a47e61a86ba8222a6b6b14eeba2700c +Subproject commit 0d3c5f674dd35cfa0981ff83be4c536d06196bf7 From b6add8cc60a0d5d13d27c55295079e3b927c506f Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 24 Apr 2025 16:30:54 -0300 Subject: [PATCH 036/181] updated chatbot package version to `v0.3.0` --- chatbot | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chatbot b/chatbot index 0d3c5f67..0ff29cdd 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit 0d3c5f674dd35cfa0981ff83be4c536d06196bf7 +Subproject commit 0ff29cdd29dd8c95b2da488c99d7ef7ae91c8aa5 From e244302af9c3b031864d0aba9eea60b86027b3bc Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 25 Apr 
2025 15:50:08 -0300 Subject: [PATCH 037/181] validated data is serialized to a UUID but `UserMessage` expects a string --- backend/apps/chatbot/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 9fc5c1ae..73625ec0 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -53,7 +53,7 @@ def post(self, request: Request, thread_id: uuid.UUID): if not serializer.is_valid(): return JsonResponse(serializer.errors, status=400) - user_message = UserMessage(**serializer.validated_data) + user_message = UserMessage(**serializer.data) thread = _get_thread_by_id(thread_id) From 96c786ed84e0f539627310249f0604668abcc3a8 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 25 Apr 2025 16:04:40 -0300 Subject: [PATCH 038/181] added healthcheck to vector database service --- docker-compose.override.yaml | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 19163ae5..4a7eef71 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -43,6 +43,15 @@ services: retries: 5 start_period: 1m restart: unless-stopped + vector-database: + image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 + ports: + - 8001:8000 + volumes: + - chroma_data:/data + healthcheck: + test: curl -f http://localhost:8000/api/v2/heartbeat || exit 1 + restart: unless-stopped api: build: context: . @@ -63,6 +72,8 @@ services: condition: service_healthy database: condition: service_healthy + vector-database: + condition: service_healthy healthcheck: test: [CMD, curl, -f, http://localhost/healthcheck/] interval: 1m @@ -70,13 +81,6 @@ services: retries: 5 start_period: 30s restart: unless-stopped - chromadb: - image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 - ports: - - "8001:8000" - volumes: - - chroma_data:/data - restart: unless-stopped volumes: esdata: From bbb44d822232ce8d92e6b2620d71db054f9e0c31 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 28 Apr 2025 12:08:02 -0300 Subject: [PATCH 039/181] created a request validation function using pydantic --- backend/apps/chatbot/serializers.py | 6 ------ backend/apps/chatbot/views.py | 21 +++++++++++++-------- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/backend/apps/chatbot/serializers.py b/backend/apps/chatbot/serializers.py index 0ca78e1f..a6affd60 100644 --- a/backend/apps/chatbot/serializers.py +++ b/backend/apps/chatbot/serializers.py @@ -1,5 +1,3 @@ -import uuid - from rest_framework import serializers from .models import Feedback, MessagePair, Thread @@ -23,7 +21,3 @@ class ThreadSerializer(serializers.ModelSerializer): class Meta: model = Thread fields = [field.name for field in Thread._meta.fields] - -class UserMessageSerializer(serializers.Serializer): - id = serializers.UUIDField(default=uuid.uuid4) - content = serializers.CharField() diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 73625ec0..c106144c 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- import uuid +from typing import Type, TypeVar +import pydantic from django.http import HttpResponse, JsonResponse from rest_framework import exceptions from rest_framework.parsers import JSONParser @@ -14,6 +16,8 @@ from .models import * from .serializers import * +PydanticModel = 
TypeVar("PydanticModel", bound=pydantic.BaseModel) + database = BigQueryDatabase() assistant = SQLAssistant(database) assistant.setup() @@ -46,14 +50,7 @@ class MessageListView(APIView): def post(self, request: Request, thread_id: uuid.UUID): thread_id = str(thread_id) - data = JSONParser().parse(request) - - serializer = UserMessageSerializer(data=data) - - if not serializer.is_valid(): - return JsonResponse(serializer.errors, status=400) - - user_message = UserMessage(**serializer.data) + user_message = _validate(request, UserMessage) thread = _get_thread_by_id(thread_id) @@ -120,3 +117,11 @@ def _get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: return MessagePair.objects.get(id=message_pair_id) except MessagePair.DoesNotExist: raise exceptions.NotFound + +def _validate(request: Request, model: Type[PydanticModel]) -> PydanticModel: + data = JSONParser().parse(request) + + try: + return model(**data) + except pydantic.ValidationError as e: + raise exceptions.ValidationError(e.errors()) From 31d6ab73ebffa226b6aec48425d82b77790bb1a4 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 28 Apr 2025 13:42:43 -0300 Subject: [PATCH 040/181] just renaming views and endpoints --- backend/apps/chatbot/urls.py | 6 +++--- backend/apps/chatbot/views.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index 0140b72f..a5c8f7bd 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -3,7 +3,7 @@ from rest_framework_simplejwt.views import (TokenObtainPairView, TokenRefreshView) -from .views import (CheckpointView, FeedbackView, MessageListView, +from .views import (CheckpointListView, FeedbackListView, MessageListView, ThreadDetailView, ThreadListView) urlpatterns = [ @@ -12,6 +12,6 @@ path("chatbot/threads/", ThreadListView.as_view()), path("chatbot/threads//", ThreadDetailView.as_view()), path("chatbot/threads//messages/", MessageListView.as_view()), - path("chatbot/message-pairs//feedback/", FeedbackView.as_view()), - path("chatbot/checkpoints//", CheckpointView.as_view()) + path("chatbot/message-pairs//feedbacks/", FeedbackListView.as_view()), + path("chatbot/checkpoints//", CheckpointListView.as_view()) ] diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index c106144c..d642e894 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -73,7 +73,7 @@ def post(self, request: Request, thread_id: uuid.UUID): return JsonResponse(serializer.data, status=201) -class FeedbackView(APIView): +class FeedbackListView(APIView): permission_classes = [IsAuthenticated] def put(self, request: Request, message_pair_id: uuid.UUID): @@ -97,7 +97,7 @@ def put(self, request: Request, message_pair_id: uuid.UUID): return JsonResponse(serializer.data, status=status) -class CheckpointView(APIView): +class CheckpointListView(APIView): def delete(self, request: Request, thread_id: uuid.UUID): try: thread_id = str(thread_id) From 2e17f2b82a1cf389224d1bb215834ddbc12462d9 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 28 Apr 2025 14:11:02 -0300 Subject: [PATCH 041/181] added authentication to the checkpoint deletion endpoint --- backend/apps/chatbot/views.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index d642e894..53341c97 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -98,6 +98,8 @@ def put(self, request: Request, message_pair_id: 
uuid.UUID): return JsonResponse(serializer.data, status=status) class CheckpointListView(APIView): + permission_classes = [IsAuthenticated] + def delete(self, request: Request, thread_id: uuid.UUID): try: thread_id = str(thread_id) From 408857cba63e1fb9295fb4556f5a78d4c9aa433b Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Mon, 28 Apr 2025 14:14:02 -0300 Subject: [PATCH 042/181] added docstrings --- backend/apps/chatbot/views.py | 100 ++++++++++++++++++++++++++++++++-- 1 file changed, 94 insertions(+), 6 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 53341c97..219649ee 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -25,12 +25,28 @@ class ThreadListView(APIView): permission_classes = [IsAuthenticated] - def get(self, request: Request): + def get(self, request: Request) -> JsonResponse: + """Retrieve all threads associated with the authenticated user. + + Args: + request (Request): A Django REST framework `Request` object containing the authenticated user. + + Returns: + JsonResponse: A JSON response containing a list of serialized threads. + """ threads = Thread.objects.filter(account=request.user.id) serializer = ThreadSerializer(threads, many=True) return JsonResponse(serializer.data, safe=False) - def post(self, request: Request): + def post(self, request: Request) -> JsonResponse: + """Create a new thread for the authenticated user. + + Args: + request (Request): A Django REST framework `Request` object containing the authenticated user. + + Returns: + JsonResponse: A JSON response containing the serialized newly created thread. + """ thread = Thread.objects.create(account=request.user) serializer = ThreadSerializer(thread) return JsonResponse(serializer.data) @@ -38,7 +54,16 @@ def post(self, request: Request): class ThreadDetailView(APIView): permission_classes = [IsAuthenticated] - def get(self, request: Request, thread_id: uuid.UUID): + def get(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: + """Retrieve all message pairs associated with a specific thread. + + Args: + request (Request): A Django REST framework `Request` object. + thread_id (uuid.UUID): The unique identifier of the thread. + + Returns: + JsonResponse: A JSON response containing the serialized message pairs. + """ thread = _get_thread_by_id(thread_id) messages = MessagePair.objects.filter(thread=thread) serializer = MessagePairSerializer(messages, many=True) @@ -47,7 +72,16 @@ def get(self, request: Request, thread_id: uuid.UUID): class MessageListView(APIView): permission_classes = [IsAuthenticated] - def post(self, request: Request, thread_id: uuid.UUID): + def post(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: + """Create a message pair for a given thread. + + Args: + request (Request): A Django REST framework `Request` object containing a user message. + thread_id (uuid.UUID): The unique identifier for the thread. + + Returns: + JsonResponse: A JSON response with the serialized message pair object. + """ thread_id = str(thread_id) user_message = _validate(request, UserMessage) @@ -76,7 +110,17 @@ def post(self, request: Request, thread_id: uuid.UUID): class FeedbackListView(APIView): permission_classes = [IsAuthenticated] - def put(self, request: Request, message_pair_id: uuid.UUID): + def put(self, request: Request, message_pair_id: uuid.UUID) -> JsonResponse: + """Create or update a feedback for a given message pair. 
+ + Args: + request (Request): A Django REST framework `Request` object containing feedback data. + message_pair_id (uuid.UUID): The unique identifier of the message pair. + + Returns: + JsonResponse: A JSON response with the serialized feedback object and an appropriate + HTTP status code (201 for creation, 200 for update). + """ data = JSONParser().parse(request) serializer = FeedbackCreateSerializer(data=data) @@ -100,7 +144,16 @@ def put(self, request: Request, message_pair_id: uuid.UUID): class CheckpointListView(APIView): permission_classes = [IsAuthenticated] - def delete(self, request: Request, thread_id: uuid.UUID): + def delete(self, request: Request, thread_id: uuid.UUID) -> HttpResponse: + """Delete all checkpoints associated with a given thread ID. + + Args: + request (Request): A Django REST framework `Request` object. + thread_id (uuid.UUID): The unique identifier of the thread. + + Returns: + HttpResponse: An HTTP response indicating success (200) or failure (500). + """ try: thread_id = str(thread_id) assistant.clear_thread(thread_id) @@ -109,18 +162,53 @@ def delete(self, request: Request, thread_id: uuid.UUID): return HttpResponse("Error clearing checkpoint", status=500) def _get_thread_by_id(thread_id: uuid.UUID) -> Thread: + """Retrieve a `Thread` object by its ID. + + Args: + message_pair_id (uuid.UUID): The unique identifier of the `Thread`. + + Raises: + NotFound: If no `Thread` exists with the given ID. + + Returns: + Thread: The retrieved `Thread` object. + """ try: return Thread.objects.get(id=thread_id) except Thread.DoesNotExist: raise exceptions.NotFound def _get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: + """Retrieve a `MessagePair` object by its ID. + + Args: + message_pair_id (uuid.UUID): The unique identifier of the `MessagePair`. + + Raises: + NotFound: If no `MessagePair` exists with the given ID. + + Returns: + MessagePair: The retrieved `MessagePair` object. + """ try: return MessagePair.objects.get(id=message_pair_id) except MessagePair.DoesNotExist: raise exceptions.NotFound def _validate(request: Request, model: Type[PydanticModel]) -> PydanticModel: + """Parse and validate a request's JSON payload against a Pydantic model. + + Args: + request (Request): A Django REST framework `Request` object containing JSON data. + model (Type[PydanticModel]): A Pydantic model class to validate against. + + Raises: + exceptions.ValidationError: Raised if the request data fails Pydantic validation. + (Re-raised as a Django REST framework `ValidationError`). + + Returns: + PydanticModel: An instance of the provided Pydantic model populated with validated data. 
+ """ data = JSONParser().parse(request) try: From 6c36b47f0a5b8f9b4d418737b082ca53077b804e Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 30 Apr 2025 18:33:23 -0300 Subject: [PATCH 043/181] update `chatbot` package to version `v0.4.0` --- backend/apps/chatbot/views.py | 71 +++++++++++++++++++++++++++++++++-- chatbot | 2 +- 2 files changed, 69 insertions(+), 4 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 219649ee..838714b6 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -1,9 +1,16 @@ # -*- coding: utf-8 -*- +import os import uuid +from functools import cache from typing import Type, TypeVar +import chromadb import pydantic from django.http import HttpResponse, JsonResponse +from langchain_chroma import Chroma +from langchain_openai import OpenAIEmbeddings +from langgraph.checkpoint.postgres import PostgresSaver +from psycopg_pool import ConnectionPool from rest_framework import exceptions from rest_framework.parsers import JSONParser from rest_framework.permissions import IsAuthenticated @@ -18,9 +25,64 @@ PydanticModel = TypeVar("PydanticModel", bound=pydantic.BaseModel) -database = BigQueryDatabase() -assistant = SQLAssistant(database) -assistant.setup() +# TODO: put all this inside a _get_sql_assistant() function +db_url = os.environ["DB_URL"] + +bq_billing_project = os.environ["BILLING_PROJECT_ID"] +bq_query_project = os.environ["QUERY_PROJECT_ID"] + +chroma_host = os.getenv("CHROMA_HOST") +chroma_port = os.getenv("CHROMA_PORT") +chroma_collection = os.getenv("SQL_CHROMA_COLLECTION") + +# TODO: Change this database for a database +# that gets the metadata from the PostgreSQL database +database = BigQueryDatabase( + billing_project=bq_billing_project, + query_project=bq_query_project, +) + +if chroma_host and chroma_port and chroma_collection: + chroma_client = chromadb.HttpClient( + host=chroma_host, + port=chroma_port, + ) + + vector_store = Chroma( + client=chroma_client, + collection_name=chroma_collection, + collection_metadata={"hnsw:space": "cosine"}, + embedding_function=OpenAIEmbeddings( + model="text-embedding-3-small" + ), + ) +else: + vector_store = None + +# Connection kwargs defined according to: +# https://github.com/langchain-ai/langgraph/issues/2887 +# https://langchain-ai.github.io/langgraph/how-tos/persistence_postgres +conn_kwargs = { + "autocommit": True, + "prepare_threshold": 0 +} + +pool = ConnectionPool( + conninfo=db_url, + kwargs=conn_kwargs, + max_size=8, + open=False, +) +pool.open() + +checkpointer = PostgresSaver(pool) +checkpointer.setup() + +assistant = SQLAssistant( + database=database, + checkpointer=checkpointer, + vector_store=vector_store +) class ThreadListView(APIView): permission_classes = [IsAuthenticated] @@ -88,6 +150,8 @@ def post(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: thread = _get_thread_by_id(thread_id) + # assistant = _get_sql_assistant() + assistant_response: SQLAssistantMessage = assistant.invoke( message=user_message, thread_id=thread_id @@ -156,6 +220,7 @@ def delete(self, request: Request, thread_id: uuid.UUID) -> HttpResponse: """ try: thread_id = str(thread_id) + # assistant = _get_sql_assistant() assistant.clear_thread(thread_id) return HttpResponse("Checkpoint cleared successfully", status=200) except Exception: diff --git a/chatbot b/chatbot index 0ff29cdd..2e217d90 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit 0ff29cdd29dd8c95b2da488c99d7ef7ae91c8aa5 +Subproject commit 
2e217d90ade3909b33f9d8893107d9fa96a4fbd8 From 2667d9d63d40abd0cef0859ed0ff76157af03bae Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 30 Apr 2025 18:34:13 -0300 Subject: [PATCH 044/181] update .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 2a795c90..5b7a9ce0 100644 --- a/.gitignore +++ b/.gitignore @@ -256,3 +256,6 @@ fixtures* # Version manager .tool-versions + +# fetch_metabase script +metabase_data From 7eb942886dec3b8a9e1f9802bf73b0030da185eb Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 30 Apr 2025 18:39:11 -0300 Subject: [PATCH 045/181] updated volume mount path to match the expected path on chroma v0.6.3 --- docker-compose.override.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 4a7eef71..6306ba62 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -48,7 +48,7 @@ services: ports: - 8001:8000 volumes: - - chroma_data:/data + - chroma_data:/chroma/chroma healthcheck: test: curl -f http://localhost:8000/api/v2/heartbeat || exit 1 restart: unless-stopped From a79a8682a29a8f7ea81bf14aa5a7b25fe33e8322 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 6 May 2025 17:14:57 -0300 Subject: [PATCH 046/181] updated `chatbot` package to `v0.4.1` and using Serializers for request validation --- backend/apps/chatbot/serializers.py | 6 ++ backend/apps/chatbot/views.py | 146 +++++++++++++++------------- chatbot | 2 +- 3 files changed, 83 insertions(+), 71 deletions(-) diff --git a/backend/apps/chatbot/serializers.py b/backend/apps/chatbot/serializers.py index a6affd60..b91a70a0 100644 --- a/backend/apps/chatbot/serializers.py +++ b/backend/apps/chatbot/serializers.py @@ -1,3 +1,5 @@ +import uuid + from rest_framework import serializers from .models import Feedback, MessagePair, Thread @@ -21,3 +23,7 @@ class ThreadSerializer(serializers.ModelSerializer): class Meta: model = Thread fields = [field.name for field in Thread._meta.fields] + +class UserMessageSerializer(serializers.Serializer): + id = serializers.CharField(default=str(uuid.uuid4)) + content = serializers.CharField() diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 838714b6..a1fd7c89 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -15,6 +15,7 @@ from rest_framework.parsers import JSONParser from rest_framework.permissions import IsAuthenticated from rest_framework.request import Request +from rest_framework.serializers import Serializer from rest_framework.views import APIView from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage @@ -25,64 +26,69 @@ PydanticModel = TypeVar("PydanticModel", bound=pydantic.BaseModel) -# TODO: put all this inside a _get_sql_assistant() function -db_url = os.environ["DB_URL"] +ModelSerializer = TypeVar("ModelSerializer", bound=Serializer) -bq_billing_project = os.environ["BILLING_PROJECT_ID"] -bq_query_project = os.environ["QUERY_PROJECT_ID"] +@cache +def _get_sql_assistant(): + db_url = os.environ["DB_URL"] -chroma_host = os.getenv("CHROMA_HOST") -chroma_port = os.getenv("CHROMA_PORT") -chroma_collection = os.getenv("SQL_CHROMA_COLLECTION") + bq_billing_project = os.environ["BILLING_PROJECT_ID"] + bq_query_project = os.environ["QUERY_PROJECT_ID"] -# TODO: Change this database for a database -# that gets the metadata from the PostgreSQL database -database = BigQueryDatabase( - billing_project=bq_billing_project, - 
query_project=bq_query_project, -) + chroma_host = os.getenv("CHROMA_HOST") + chroma_port = os.getenv("CHROMA_PORT") + chroma_collection = os.getenv("SQL_CHROMA_COLLECTION") -if chroma_host and chroma_port and chroma_collection: - chroma_client = chromadb.HttpClient( - host=chroma_host, - port=chroma_port, + # TODO: Change this database for a database + # that gets the metadata from the PostgreSQL database + database = BigQueryDatabase( + billing_project=bq_billing_project, + query_project=bq_query_project, ) - vector_store = Chroma( - client=chroma_client, - collection_name=chroma_collection, - collection_metadata={"hnsw:space": "cosine"}, - embedding_function=OpenAIEmbeddings( - model="text-embedding-3-small" - ), + if chroma_host and chroma_port and chroma_collection: + chroma_client = chromadb.HttpClient( + host=chroma_host, + port=chroma_port, + ) + + vector_store = Chroma( + client=chroma_client, + collection_name=chroma_collection, + collection_metadata={"hnsw:space": "cosine"}, + embedding_function=OpenAIEmbeddings( + model="text-embedding-3-small" + ), + ) + else: + vector_store = None + + # Connection kwargs defined according to: + # https://github.com/langchain-ai/langgraph/issues/2887 + # https://langchain-ai.github.io/langgraph/how-tos/persistence_postgres + conn_kwargs = { + "autocommit": True, + "prepare_threshold": 0 + } + + pool = ConnectionPool( + conninfo=db_url, + kwargs=conn_kwargs, + max_size=8, + open=False, + ) + pool.open() + + checkpointer = PostgresSaver(pool) + checkpointer.setup() + + assistant = SQLAssistant( + database=database, + checkpointer=checkpointer, + vector_store=vector_store ) -else: - vector_store = None - -# Connection kwargs defined according to: -# https://github.com/langchain-ai/langgraph/issues/2887 -# https://langchain-ai.github.io/langgraph/how-tos/persistence_postgres -conn_kwargs = { - "autocommit": True, - "prepare_threshold": 0 -} - -pool = ConnectionPool( - conninfo=db_url, - kwargs=conn_kwargs, - max_size=8, - open=False, -) -pool.open() - -checkpointer = PostgresSaver(pool) -checkpointer.setup() - -assistant = SQLAssistant( - database=database, - checkpointer=checkpointer, - vector_store=vector_store -) + + return assistant class ThreadListView(APIView): permission_classes = [IsAuthenticated] @@ -146,11 +152,13 @@ def post(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: """ thread_id = str(thread_id) - user_message = _validate(request, UserMessage) + serializer = _validate(request, UserMessageSerializer) + + user_message = UserMessage(**serializer.validated_data) thread = _get_thread_by_id(thread_id) - # assistant = _get_sql_assistant() + assistant = _get_sql_assistant() assistant_response: SQLAssistantMessage = assistant.invoke( message=user_message, @@ -185,12 +193,7 @@ def put(self, request: Request, message_pair_id: uuid.UUID) -> JsonResponse: JsonResponse: A JSON response with the serialized feedback object and an appropriate HTTP status code (201 for creation, 200 for update). 
""" - data = JSONParser().parse(request) - - serializer = FeedbackCreateSerializer(data=data) - - if not serializer.is_valid(): - return JsonResponse(serializer.errors, status=400) + serializer = _validate(request, FeedbackCreateSerializer) message_pair = _get_message_pair_by_id(message_pair_id) @@ -220,7 +223,7 @@ def delete(self, request: Request, thread_id: uuid.UUID) -> HttpResponse: """ try: thread_id = str(thread_id) - # assistant = _get_sql_assistant() + assistant = _get_sql_assistant() assistant.clear_thread(thread_id) return HttpResponse("Checkpoint cleared successfully", status=200) except Exception: @@ -260,23 +263,26 @@ def _get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: except MessagePair.DoesNotExist: raise exceptions.NotFound -def _validate(request: Request, model: Type[PydanticModel]) -> PydanticModel: - """Parse and validate a request's JSON payload against a Pydantic model. +def _validate(request: Request, model_serializer: Type[ModelSerializer]) -> ModelSerializer: + """ + Parse and validate the JSON payload from a request using a Django REST framework serializer. Args: request (Request): A Django REST framework `Request` object containing JSON data. - model (Type[PydanticModel]): A Pydantic model class to validate against. + model_serializer (Type[ModelSerializer]): A serializer class used to validate the data. Raises: - exceptions.ValidationError: Raised if the request data fails Pydantic validation. - (Re-raised as a Django REST framework `ValidationError`). + exceptions.ValidationError: If the request data fails serializer validation. Returns: - PydanticModel: An instance of the provided Pydantic model populated with validated data. + ModelSerializer: An instance of the serializer populated with validated data. 
""" + data = JSONParser().parse(request) - try: - return model(**data) - except pydantic.ValidationError as e: - raise exceptions.ValidationError(e.errors()) + serializer = model_serializer(data=data) + + if not serializer.is_valid(): + raise exceptions.ValidationError(serializer.errors) + + return serializer diff --git a/chatbot b/chatbot index 2e217d90..666c2754 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit 2e217d90ade3909b33f9d8893107d9fa96a4fbd8 +Subproject commit 666c27549785f6aaeb4383a8c71b831969a330f1 From 7d3a9bbde9aaf6109a332b9a59c5b3d0dbe6affa Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 7 May 2025 17:40:03 -0300 Subject: [PATCH 047/181] updated chatbot package version to v0.4.2 --- chatbot | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chatbot b/chatbot index 666c2754..06e51366 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit 666c27549785f6aaeb4383a8c71b831969a330f1 +Subproject commit 06e51366853450f4366c7bab40f7172166b255b9 From 916a49b97b84160450dd84b7918b4464d900c47b Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 7 May 2025 17:41:06 -0300 Subject: [PATCH 048/181] created `ChatbotDatabase` to read metadata from the local database and query data on BigQuery --- backend/apps/chatbot/database/__init__.py | 1 + backend/apps/chatbot/database/database.py | 172 ++++++++++++++++++ .../chatbot/database/metadata_formatter.py | 97 ++++++++++ backend/apps/chatbot/views.py | 5 +- 4 files changed, 272 insertions(+), 3 deletions(-) create mode 100644 backend/apps/chatbot/database/__init__.py create mode 100644 backend/apps/chatbot/database/database.py create mode 100644 backend/apps/chatbot/database/metadata_formatter.py diff --git a/backend/apps/chatbot/database/__init__.py b/backend/apps/chatbot/database/__init__.py new file mode 100644 index 00000000..5de7b00e --- /dev/null +++ b/backend/apps/chatbot/database/__init__.py @@ -0,0 +1 @@ +from .database import ChatbotDatabase diff --git a/backend/apps/chatbot/database/database.py b/backend/apps/chatbot/database/database.py new file mode 100644 index 00000000..9ecf184f --- /dev/null +++ b/backend/apps/chatbot/database/database.py @@ -0,0 +1,172 @@ +import json +import os + +import cachetools.func +from google.cloud import bigquery as bq +from loguru import logger + +from backend.apps.api.v1.models import Dataset + +from .metadata_formatter import * + + +class ChatbotDatabase: + """A BigQuery-backed database interface with local metadata support. + + This class provides methods to: + - Retrieve and format metadata about datasets, tables, and columns + - Execute SQL queries on BigQuery + + Optionally uses a custom metadata formatter; defaults to Markdown formatting. + + Args: + billing_project (str | None): + GCP project ID for billing. Falls back to the `BILLING_PROJECT_ID` environment variable if not provided. + query_project (str | None): + GCP project ID for executing queries. Falls back to the `QUERY_PROJECT_ID` environment variable if not provided. + metadata_formatter (MetadataFormatter | None): + Custom formatter for metadata. Defaults to `MarkdownMetadataFormatter`. 
+ """ + + def __init__( + self, + billing_project: str | None = None, + query_project: str | None = None, + metadata_formatter: MetadataFormatter | None = None, + ): + billing_project = billing_project or os.getenv('BILLING_PROJECT_ID') + query_project = query_project or os.getenv('QUERY_PROJECT_ID') + + self._client = bq.Client(billing_project) + self._project = query_project + + if metadata_formatter is not None: + self.formatter = metadata_formatter + else: + self.formatter = MarkdownMetadataFormatter() + + @staticmethod + @cachetools.func.ttl_cache(ttl=60*60*24) + def _get_metadata() -> list[DatasetMetadata]: + """Fetch and return metadata for all datasets and their associated tables and columns. + The metadata includes dataset and table IDs and descriptions, and column information + such as name, type, and description. The result is cached for 24 hours. + + Returns: + list[DatasetMetadata]: A list of metadata objects, one for each dataset with at least one valid table. + """ + datasets = Dataset.objects.prefetch_related( + "tables__cloud_tables__columns__bigquery_type" + ) + + datasets_metadata: list[DatasetMetadata] = [] + + for dataset in datasets: + gcp_dataset_id = None + tables_metadata: list[TableMetadata] = [] + + for table in dataset.tables.all(): + # There must be only a single CloudTable for a given Table + cloud_table = table.cloud_tables.first() + + if cloud_table is None: + continue + + if gcp_dataset_id is None: + gcp_dataset_id = cloud_table.gcp_dataset_id + + columns_metadata = [ + ColumnMetadata( + name=column.name, + type=column.bigquery_type.name, + description=column.description, + ) + for column in cloud_table.columns.all() + ] + + full_table_id = f"{cloud_table.gcp_project_id}.{cloud_table.gcp_dataset_id}.{cloud_table.gcp_table_id}" + + tables_metadata.append( + TableMetadata( + id=cloud_table.gcp_table_id, + full_table_id=full_table_id, + name=table.name, + description=table.description, + columns=columns_metadata + ) + ) + + # TODO: Get some sample rows for each table + + if tables_metadata: + datasets_metadata.append( + DatasetMetadata( + id=gcp_dataset_id, + name=dataset.name, + description=dataset.description, + tables=tables_metadata + ) + ) + + return datasets_metadata + + def get_datasets_info(self) -> str: + """Return formatted metadata for all datasets in a BigQuery project. + + Returns: + str: A formatted string containing metadata for the datasets. + """ + datasets_info = [ + self.formatter.format_dataset_metadata(dataset) + for dataset in self._get_metadata() + ] + + return "\n\n---\n\n".join(datasets_info) + + def get_tables_info(self, dataset_names: str) -> str: + """Return formatted metadata for all tables in one or more BigQuery datasets. + + Args: + dataset_names (str): A comma-separated list of BigQuery dataset IDs. + + Returns: + str: A formatted string containing metadata for the tables in the specified datasets. + """ + dataset_ids = {id.strip() for id in dataset_names.split(",")} + + datasets = [ + dataset for dataset in self._get_metadata() if dataset.id in dataset_ids + ] + + tables_info = [] + + for dataset in datasets: + tables_info.append( + "\n\n".join([self.formatter.format_table_metadata(table) for table in dataset.tables]) + ) + + return "\n\n---\n\n".join(tables_info) + + def query(self, query: str) -> str: + """Execute a SQL query using BigQuery and return the results as a JSON string. + + Args: + query (str): The SQL query to execute. + + Raises: + Exception: Propagates any exceptions raised during query execution. 
+
+        Returns:
+            str: A JSON-formatted string representing the query results. Returns an empty string if no results are found.
+        """
+        try:
+            rows = self._client.query(query, project=self._project).result()
+
+            results = [dict(row) for row in rows]
+
+            if results:
+                return json.dumps(results, ensure_ascii=False, default=str)
+            return ""
+        except Exception as e:
+            logger.exception("Error querying table:")
+            raise e
diff --git a/backend/apps/chatbot/database/metadata_formatter.py b/backend/apps/chatbot/database/metadata_formatter.py
new file mode 100644
index 00000000..a2ebd266
--- /dev/null
+++ b/backend/apps/chatbot/database/metadata_formatter.py
@@ -0,0 +1,97 @@
+from typing import Protocol
+
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class Metadata(BaseModel):
+    model_config = ConfigDict(str_strip_whitespace=True)
+
+class ColumnMetadata(Metadata):
+    name: str = Field(description="BigQuery column name")
+    type: str = Field(description="BigQuery column type")
+    description: str|None = Field(default=None, description="BigQuery column description")
+
+class TableMetadata(Metadata):
+    id: str = Field(description="BigQuery table id")
+    full_table_id: str = Field(description="BigQuery table_id in the format project_id.dataset_id.table_id")
+    name: str = Field(description="Table name")
+    description: str|None = Field(default=None, description="Table description")
+    columns: list[ColumnMetadata] = Field(description="List of columns for this table")
+
+class DatasetMetadata(Metadata):
+    id: str = Field(description="BigQuery dataset id")
+    name: str = Field(description="Dataset name")
+    description: str|None = Field(default=None, description="Dataset description")
+    tables: list[TableMetadata] = Field(description="List of tables for this dataset")
+
+class MetadataFormatter(Protocol):
+    @staticmethod
+    def format_dataset_metadata(dataset: DatasetMetadata) -> str:
+        ...
+
+    @staticmethod
+    def format_table_metadata(table: TableMetadata) -> str:
+        ...
+
+class MarkdownMetadataFormatter:
+    @staticmethod
+    def format_dataset_metadata(dataset: DatasetMetadata) -> str:
+        """Return formatted dataset metadata in markdown.
+
+        Args:
+            dataset (DatasetMetadata): An object containing dataset metadata.
+
+        Returns:
+            str: Formatted metadata for the given dataset.
+        """
+        # Dataset name and description
+        metadata = f"# {dataset.id}\n\n### Description:\n{dataset.description}\n\n### Tables:\n"
+
+        # Dataset tables
+        tables_metadata = [
+            f"- {table.full_table_id}: {table.description}"
+            for table in dataset.tables
+        ]
+
+        metadata += "\n\n".join(tables_metadata)
+
+        return metadata
+
+    @staticmethod
+    def format_table_metadata(table: TableMetadata) -> str:
+        """Return formatted table metadata in markdown.
+
+        Args:
+            table (TableMetadata): An object containing table metadata.
+
+        Returns:
+            str: Formatted metadata for the given table.
+ """ + # Table name + metadata = f"# {table.id}\n\n" + + # Table description + metadata += f"### Description:\n{table.description}\n\n" + + # Table schema + metadata += f"### Schema:\n" + fields = "\n\t".join([ + f"{field.name} {field.type}" + for field in table.columns + ]) + metadata += f"CREATE TABLE {table.id} (\n\t{fields}\n)\n\n" + + # Table columns details + metadata += f"### Column Details:\n" + header = "|column name|column type|column description|\n|---|---|---|" + lines = "\n".join([ + f"|{field.name}|{field.type}|{field.description}|" + for field in table.columns + ]) + + if lines: + metadata += f"{header}\n{lines}" + else: + metadata += header + + return metadata diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index a1fd7c89..10ff71a1 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -19,8 +19,7 @@ from rest_framework.views import APIView from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage -from chatbot.databases import BigQueryDatabase - +from .database import ChatbotDatabase from .models import * from .serializers import * @@ -41,7 +40,7 @@ def _get_sql_assistant(): # TODO: Change this database for a database # that gets the metadata from the PostgreSQL database - database = BigQueryDatabase( + database = ChatbotDatabase( billing_project=bq_billing_project, query_project=bq_query_project, ) From 8e62c39a8e0fcfc1279c6d776287e68f3fe89cd3 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 8 May 2025 11:47:59 -0300 Subject: [PATCH 049/181] created chatbot authentication field on user account model --- backend/apps/account/models.py | 5 +++++ backend/apps/chatbot/authentication.py | 5 +++++ backend/apps/chatbot/views.py | 6 ++---- backend/settings/base.py | 3 +-- 4 files changed, 13 insertions(+), 6 deletions(-) create mode 100644 backend/apps/chatbot/authentication.py diff --git a/backend/apps/account/models.py b/backend/apps/account/models.py index a2be8cab..687207cc 100644 --- a/backend/apps/account/models.py +++ b/backend/apps/account/models.py @@ -251,6 +251,11 @@ class Account(BaseModel, AbstractBaseUser, PermissionsMixin): is_email_visible = models.BooleanField( "Email é visível", default=False, help_text="Indica se o email do usuário é público" ) + has_chatbot_access = models.BooleanField( + "Tem acesso ao chatbot", + default=False, + help_text="Indica se o usuário tem acesso ao chatbot", + ) profile = models.IntegerField( choices=PROFILE_CHOICES, diff --git a/backend/apps/chatbot/authentication.py b/backend/apps/chatbot/authentication.py new file mode 100644 index 00000000..88b72d18 --- /dev/null +++ b/backend/apps/chatbot/authentication.py @@ -0,0 +1,5 @@ +from backend.apps.account.models import Account + + +def authentication_rule(user: Account) -> bool: + return user.has_chatbot_access diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 10ff71a1..c51f7ba4 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -38,8 +38,6 @@ def _get_sql_assistant(): chroma_port = os.getenv("CHROMA_PORT") chroma_collection = os.getenv("SQL_CHROMA_COLLECTION") - # TODO: Change this database for a database - # that gets the metadata from the PostgreSQL database database = ChatbotDatabase( billing_project=bq_billing_project, query_project=bq_query_project, @@ -76,7 +74,7 @@ def _get_sql_assistant(): max_size=8, open=False, ) - pool.open() + pool.open() # TODO: where to close the pool? 
checkpointer = PostgresSaver(pool) checkpointer.setup() @@ -101,7 +99,7 @@ def get(self, request: Request) -> JsonResponse: Returns: JsonResponse: A JSON response containing a list of serialized threads. """ - threads = Thread.objects.filter(account=request.user.id) + threads = Thread.objects.filter(account=request.user) serializer = ThreadSerializer(threads, many=True) return JsonResponse(serializer.data, safe=False) diff --git a/backend/settings/base.py b/backend/settings/base.py index 08bfdda7..b8bedc77 100644 --- a/backend/settings/base.py +++ b/backend/settings/base.py @@ -81,7 +81,7 @@ SIMPLE_JWT = { "ACCESS_TOKEN_LIFETIME": timedelta(days=1), "REFRESH_TOKEN_LIFETIME": timedelta(days=7), - # "USER_AUTHENTICATION_RULE": "backend.apps.chatbot.authentication.user_authentication_rule", <- for custom authentication rules + "USER_AUTHENTICATION_RULE": "backend.apps.chatbot.authentication.authentication_rule" } MIDDLEWARE = [ @@ -116,7 +116,6 @@ WSGI_APPLICATION = "backend.wsgi.application" - # Password validation # https://docs.djangoproject.com/en/4.1/ref/settings/#auth-password-validators From 3ad728b3d7781df8ba839314cbcef401c06eaefb Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 9 May 2025 10:46:12 -0300 Subject: [PATCH 050/181] sending feedbacks to langsmith --- backend/apps/chatbot/feedback_sender.py | 111 ++++++++++++++++++++++++ backend/apps/chatbot/models.py | 20 ++++- backend/apps/chatbot/views.py | 23 +++-- 3 files changed, 148 insertions(+), 6 deletions(-) create mode 100644 backend/apps/chatbot/feedback_sender.py diff --git a/backend/apps/chatbot/feedback_sender.py b/backend/apps/chatbot/feedback_sender.py new file mode 100644 index 00000000..a9dab149 --- /dev/null +++ b/backend/apps/chatbot/feedback_sender.py @@ -0,0 +1,111 @@ +from queue import Queue, Full +from threading import Thread + +import langsmith +from django.utils import timezone +from loguru import logger + +from backend.apps.chatbot.models import Feedback + + +class LangSmithFeedbackSender: + """A feedback sender that sends feedback to LangSmith using a background worker.""" + + def __init__(self, api_url: str | None = None, api_key: str | None = None): + self._langsmith_client = langsmith.Client( + api_url=api_url, + api_key=api_key + ) + + self._queue: Queue[tuple[Feedback, bool]] = Queue(maxsize=1000) + + self._thread = Thread(target=self._process_feedback, daemon=True) + self._thread.start() + + def _create_langsmith_feedback(self, feedback: Feedback) -> bool: + """Create feedback on LangSmith. + + Args: + feedback (Feedback): The feedback instance to create. + + Returns: + bool: True if successful, False otherwise. + """ + try: + _ = self._langsmith_client.create_feedback( + run_id=feedback.message_pair.id, + key="helpfulness", + feedback_id=feedback.id, + score=feedback.rating, + comment=feedback.comment + ) + logger.info( + f"Successfully created feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith" + ) + return True + except Exception: + logger.exception( + f"Failed to create feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith:" + ) + return False + + def _update_langsmith_feedback(self, feedback: Feedback) -> bool: + """Update existing feedback on LangSmith. + + Args: + feedback (Feedback): The feedback instance to update. + + Returns: + bool: True if successful, False otherwise. 
+ """ + try: + self._langsmith_client.update_feedback( + feedback_id=feedback.id, + score=feedback.rating, + comment=feedback.comment + ) + logger.info( + f"Successfully updated feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith" + ) + return True + except Exception: + logger.exception( + f"Failed to update feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith:" + ) + return False + + def _process_feedback(self): + """Background worker that continuously processes feedbacks from the queue. + Updates the feedback sync status in the local database after each operation. + """ + while True: + feedback, created = self._queue.get() + + if created: + success = self._create_langsmith_feedback(feedback) + else: + success = self._update_langsmith_feedback(feedback) + + feedback.sync_status = "success" if success else "failed" + feedback.synced_at = timezone.now() + feedback.save() + + self._queue.task_done() + + def send_feedback(self, feedback: Feedback, created: bool): + """Enqueue a feedback instance for creation or update on LangSmith. + + Args: + feedback (Feedback): The feedback instance to send. + created (bool): True if this is a new feedback, False if it's an update. + """ + try: + self._queue.put( + item=(feedback, created), + timeout=10, + ) + except Full: + operation = "create" if created else "update" + logger.warning( + f"LangSmith feedbacks queue is full - could not {operation} feedback {feedback.id} on LangSmith" + ) diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index 7fc42c95..56459368 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- import uuid +from typing import Any from django.db import models +from django.utils import timezone from backend.apps.account.models import Account @@ -27,4 +29,20 @@ class Feedback(models.Model): rating = models.SmallIntegerField(choices=[(0, "Bad"), (1, "Good")]) comment = models.TextField(null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) + updated_at = models.DateTimeField(null=True, blank=True) + sync_status = models.TextField( + choices=[ + ("pending", "Pending"), + ("success", "Success"), + ("failed", "Failed") + ], + default="pending" + ) + synced_at = models.DateTimeField(null=True, blank=True) + + def user_update(self, data: dict[str, int|str]): + for attr, value in data.items(): + setattr(self, attr, value) + self.updated_at = timezone.now() + self.sync_status = "pending" + self.save() diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index c51f7ba4..94a2827c 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -20,6 +20,7 @@ from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage from .database import ChatbotDatabase +from .feedback_sender import LangSmithFeedbackSender from .models import * from .serializers import * @@ -28,7 +29,11 @@ ModelSerializer = TypeVar("ModelSerializer", bound=Serializer) @cache -def _get_sql_assistant(): +def _get_feedback_sender() -> LangSmithFeedbackSender: + return LangSmithFeedbackSender() + +@cache +def _get_sql_assistant() -> SQLAssistant: db_url = os.environ["DB_URL"] bq_billing_project = os.environ["BILLING_PROJECT_ID"] @@ -194,10 +199,18 @@ def put(self, request: Request, message_pair_id: uuid.UUID) -> JsonResponse: message_pair = _get_message_pair_by_id(message_pair_id) - feedback, created = 
Feedback.objects.update_or_create( - message_pair=message_pair, - defaults=serializer.data - ) + try: + feedback = Feedback.objects.get(message_pair=message_pair) + feedback.user_update(serializer.validated_data) + created = False + except Feedback.DoesNotExist: + feedback = Feedback.objects.create( + message_pair=message_pair, **serializer.validated_data + ) + created = True + + feedback_sender = _get_feedback_sender() + feedback_sender.send_feedback(feedback, created) serializer = FeedbackSerializer(feedback) From d21a3a434357571cd6cb0a8a0cf9c66e4048280e Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 9 May 2025 17:18:58 -0300 Subject: [PATCH 051/181] fixed authentication rule (when a user is not found in db, django simple jwt returns `None`, so it has no `has_chatbot_access` attribute --- backend/apps/chatbot/authentication.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/apps/chatbot/authentication.py b/backend/apps/chatbot/authentication.py index 88b72d18..40209d94 100644 --- a/backend/apps/chatbot/authentication.py +++ b/backend/apps/chatbot/authentication.py @@ -2,4 +2,6 @@ def authentication_rule(user: Account) -> bool: - return user.has_chatbot_access + if user is not None: + return user.has_chatbot_access + return False From 54e95a9f1d9233d55a6f90d74f7cc7077217464a Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 9 May 2025 17:28:44 -0300 Subject: [PATCH 052/181] checking if the thread exists before trying to delete its checkpoints and returning 201 on thread creation --- backend/apps/chatbot/views.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 94a2827c..43c70e4e 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -119,7 +119,7 @@ def post(self, request: Request) -> JsonResponse: """ thread = Thread.objects.create(account=request.user) serializer = ThreadSerializer(thread) - return JsonResponse(serializer.data) + return JsonResponse(serializer.data, status=201) class ThreadDetailView(APIView): permission_classes = [IsAuthenticated] @@ -231,10 +231,11 @@ def delete(self, request: Request, thread_id: uuid.UUID) -> HttpResponse: Returns: HttpResponse: An HTTP response indicating success (200) or failure (500). """ + thread = _get_thread_by_id(thread_id) + try: - thread_id = str(thread_id) assistant = _get_sql_assistant() - assistant.clear_thread(thread_id) + assistant.clear_thread(str(thread.id)) return HttpResponse("Checkpoint cleared successfully", status=200) except Exception: return HttpResponse("Error clearing checkpoint", status=500) @@ -287,7 +288,6 @@ def _validate(request: Request, model_serializer: Type[ModelSerializer]) -> Mode Returns: ModelSerializer: An instance of the serializer populated with validated data. 
""" - data = JSONParser().parse(request) serializer = model_serializer(data=data) From 1fa90276e2df2b0505e536c4f1e211e53a2a3811 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 9 May 2025 17:29:34 -0300 Subject: [PATCH 053/181] added unit tests for the endpoints --- backend/apps/chatbot/tests/__init__.py | 0 backend/apps/chatbot/tests/test_endpoints.py | 302 +++++++++++++++++++ 2 files changed, 302 insertions(+) create mode 100644 backend/apps/chatbot/tests/__init__.py create mode 100644 backend/apps/chatbot/tests/test_endpoints.py diff --git a/backend/apps/chatbot/tests/__init__.py b/backend/apps/chatbot/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/apps/chatbot/tests/test_endpoints.py b/backend/apps/chatbot/tests/test_endpoints.py new file mode 100644 index 00000000..2eba5f44 --- /dev/null +++ b/backend/apps/chatbot/tests/test_endpoints.py @@ -0,0 +1,302 @@ +import pytest +from rest_framework.test import APIClient + +from backend.apps.account.models import Account +from backend.apps.chatbot import views +from backend.apps.chatbot.models import * +from chatbot.assistants import SQLAssistantMessage + + +class MockSQLAssistant: + def __init__(self, *args, **kwargs): + ... + + def invoke(self, *args, **kwargs): + return SQLAssistantMessage( + model_uri="google/gemini-2.0-flash", + content="mock response" + ) + + def clear_thread(self, *args, **kwargs): + ... + +class MockLangSmithFeedbackSender: + def __init__(self, *args, **kwargs): + ... + + def send_feedback(self, *args, **kwargs): + ... + +@pytest.fixture +def mock_email() -> str: + return "mockemail@mockdomain.com" + +@pytest.fixture +def mock_password() -> str: + return "mockpassword" + +@pytest.fixture +def client() -> APIClient: + return APIClient() + +@pytest.fixture +def auth_user(mock_email: str, mock_password: str) -> Account: + return Account.objects.create( + email=mock_email, + password=mock_password, + is_active=True, + has_chatbot_access=True, + ) + +@pytest.fixture +def access_token( + client: APIClient, mock_email: str, mock_password: str, auth_user: Account +) -> str: + response = client.post( + path="/chatbot/token/", + data={"email": mock_email, "password": mock_password} + ) + assert response.status_code == 200 + + return response.data["access"] + +@pytest.fixture +def auth_client(access_token) -> APIClient: + client = APIClient() + client.credentials(HTTP_AUTHORIZATION=f"Bearer {access_token}") + return client + +@pytest.mark.django_db +def test_token_view_authorized( + client: APIClient, mock_email: str, mock_password: str +): + _ = Account.objects.create( + email=mock_email, + password=mock_password, + is_active=True, + has_chatbot_access=True, + ) + + response = client.post( + path="/chatbot/token/", + data={"email": mock_email, "password": mock_password} + ) + + assert response.status_code == 200 + +@pytest.mark.django_db +def test_token_view_unauthorized( + client: APIClient, mock_email: str, mock_password: str +): + _ = Account.objects.create( + email=mock_email, + password=mock_password, + is_active=True, + # has_chatbot_access = False - has_chatbot_access is False by default + ) + + response = client.post( + path="/chatbot/token/", + data={"email": mock_email, "password": mock_password} + ) + + assert response.status_code == 401 + +@pytest.mark.django_db +def test_token_view_user_not_registered( + client: APIClient, mock_email: str, mock_password: str +): + response = client.post( + path="/chatbot/token/", + data={"email": mock_email, "password": mock_password} + ) 
+ + assert response.status_code == 401 + +@pytest.mark.django_db +def test_thread_list_view_get(auth_client: APIClient): + response = auth_client.get("/chatbot/threads/") + assert response.status_code == 200 + assert isinstance(response.json(), list) + +@pytest.mark.django_db +def test_thread_list_view_post(auth_client: APIClient): + response = auth_client.post("/chatbot/threads/") + assert response.status_code == 201 + + thread_attrs = response.json() + + assert "id" in thread_attrs + assert "account" in thread_attrs + assert "created_at" in thread_attrs + assert Thread.objects.get(id=thread_attrs["id"]) + +@pytest.mark.django_db +def test_thread_detail_view_get(auth_client: APIClient, auth_user: Account): + thread = Thread.objects.create(account=auth_user) + + response = auth_client.get(f"/chatbot/threads/{thread.id}/") + assert response.status_code == 200 + assert isinstance(response.json(), list) + +@pytest.mark.django_db +def test_thread_detail_view_get_not_found(auth_client: APIClient): + response = auth_client.get(f"/chatbot/threads/{uuid.uuid4()}/") + assert response.status_code == 404 + +@pytest.mark.django_db +def test_message_list_view_post(monkeypatch, auth_client: APIClient, auth_user: Account): + monkeypatch.setattr(views, "SQLAssistant", MockSQLAssistant) + + thread = Thread.objects.create(account=auth_user) + + response = auth_client.post( + path=f"/chatbot/threads/{thread.id}/messages/", + data={"id": str(uuid.uuid4()), "content": "mock message"}, + format="json" + ) + + assert response.status_code == 201 + + response = auth_client.post( + path=f"/chatbot/threads/{thread.id}/messages/", + data={"content": "mock message"}, + format="json" + ) + + assert response.status_code == 201 + +@pytest.mark.django_db +def test_message_list_view_post_bad_request(auth_client: APIClient, auth_user: Account): + thread = Thread.objects.create(account=auth_user) + + response = auth_client.post( + path=f"/chatbot/threads/{thread.id}/messages/", + data={"id": str(uuid.uuid4())}, + format="json" + ) + + assert response.status_code == 400 + + response = auth_client.post( + path=f"/chatbot/threads/{thread.id}/messages/", + data={"id": str(uuid.uuid4()), "content": []}, + format="json" + ) + + assert response.status_code == 400 + +@pytest.mark.django_db +def test_message_list_view_post_not_found(auth_client: APIClient): + response = auth_client.post( + path=f"/chatbot/threads/{uuid.uuid4()}/messages/", + data={"id": str(uuid.uuid4()), "content": "mock message"}, + format="json" + ) + assert response.status_code == 404 + +@pytest.mark.django_db +def test_feedback_list_view_put_create(monkeypatch, auth_client: APIClient, auth_user: Account): + monkeypatch.setattr(views, "LangSmithFeedbackSender", MockLangSmithFeedbackSender) + + thread = Thread.objects.create(account=auth_user) + + message_pairs = [ + MessagePair.objects.create( + thread=thread, + model_uri="google/gemini-2.0-flash", + user_message="mock message", + assistant_message="mock response", + ) for _ in range(2) + ] + + response = auth_client.put( + path=f"/chatbot/message-pairs/{message_pairs[0].id}/feedbacks/", + data={"rating": 1, "comment": "good"}, + format="json" + ) + + assert response.status_code == 201 + + response = auth_client.put( + path=f"/chatbot/message-pairs/{message_pairs[1].id}/feedbacks/", + data={"rating": 1, "comment": None}, + format="json" + ) + + assert response.status_code == 201 + +@pytest.mark.django_db +def test_feedback_list_view_put_update(monkeypatch, auth_client: APIClient, auth_user: Account): + 
monkeypatch.setattr(views, "LangSmithFeedbackSender", MockLangSmithFeedbackSender) + + thread = Thread.objects.create(account=auth_user) + + message_pair = MessagePair.objects.create( + thread=thread, + model_uri="google/gemini-2.0-flash", + user_message="mock message", + assistant_message="mock response" + ) + + _ = Feedback.objects.create( + message_pair=message_pair, + rating=0, + comment="bad" + ) + + response = auth_client.put( + path=f"/chatbot/message-pairs/{message_pair.id}/feedbacks/", + data={"rating": 1, "comment": "good"}, + format="json" + ) + + assert response.status_code == 200 + +@pytest.mark.django_db +def test_feedback_list_view_put_bad_request(auth_client: APIClient, auth_user: Account): + thread = Thread.objects.create(account=auth_user) + + message_pair = MessagePair.objects.create( + thread=thread, + model_uri="google/gemini-2.0-flash", + user_message="mock message", + assistant_message="mock response", + ) + + response = auth_client.put( + path=f"/chatbot/message-pairs/{message_pair.id}/feedbacks/", + data={"comment": "good"}, + format="json" + ) + + assert response.status_code == 400 + + response = auth_client.put( + path=f"/chatbot/message-pairs/{message_pair.id}/feedbacks/", + data={"rating": 1, "comment": []}, + format="json" + ) + + assert response.status_code == 400 + +@pytest.mark.django_db +def test_feedback_list_view_put_not_found(auth_client: APIClient, auth_user: Account): + response = auth_client.put( + path=f"/chatbot/message-pairs/{uuid.uuid4()}/feedbacks/", + data={"rating": 1, "comment": "good"}, + format="json" + ) + + assert response.status_code == 404 + +@pytest.mark.django_db +def test_checkpoint_list_view_delete(auth_client: APIClient, auth_user: Account): + thread = Thread.objects.create(account=auth_user) + response = auth_client.delete(f"/chatbot/checkpoints/{thread.id}/") + assert response.status_code == 200 + +@pytest.mark.django_db +def test_checkpoint_list_view_delete_not_found(auth_client: APIClient): + response = auth_client.delete(f"/chatbot/checkpoints/{uuid.uuid4()}/") + assert response.status_code == 404 From da53cf971d1f5b729352c67af462c1a44111602d Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 9 May 2025 17:30:07 -0300 Subject: [PATCH 054/181] added `chatbot` dependency --- poetry.lock | 3366 ++++++++++++++++++++++++++++++++++++++++++++---- pyproject.toml | 1 + 2 files changed, 3152 insertions(+), 215 deletions(-) diff --git a/poetry.lock b/poetry.lock index 728c390f..d39569c0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,6 +27,29 @@ files = [ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] +[[package]] +name = "anyio" +version = "4.9.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", 
"coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] + [[package]] name = "asgiref" version = "3.7.2" @@ -58,6 +81,109 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bcrypt" +version = "4.3.0" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"}, + {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"}, + {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"}, + {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"}, + {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"}, + {file = 
"bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"}, + {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"}, + {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"}, + {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "build" +version = "1.2.2.post1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + [[package]] name = "cachetools" version = "5.3.2" @@ -82,6 +208,87 @@ files = [ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] +[[package]] +name = 
"cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = 
"cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + 
+[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.4.0" @@ -194,13 +401,125 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "chatbot" +version = "0.4.2" +description = "" +optional = false +python-versions = "^3.10" +groups = ["main"] +files = [] +develop = false + +[package.dependencies] +google-cloud-bigquery = "^3.25.0" +grpcio = "1.71.0" +langchain = "0.3.20" +langchain-chroma = "0.2.2" +langchain-google-vertexai = "2.0.15" +langchain-openai = "0.3.9" +langgraph = "0.3.11" +langgraph-checkpoint-postgres = "2.0.17" +loguru = "^0.7.2" +psycopg = "3.2.6" +psycopg-pool = "3.2.6" +sqlparse = "^0.5.1" + +[package.source] +type = "directory" +url = "chatbot" + +[[package]] +name = "chroma-hnswlib" +version = "0.7.6" +description = "Chromas fork of hnswlib" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f35192fbbeadc8c0633f0a69c3d3e9f1a4eab3a46b65458bbcbcabdd9e895c36"}, + {file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f007b608c96362b8f0c8b6b2ac94f67f83fcbabd857c378ae82007ec92f4d82"}, + {file = "chroma_hnswlib-0.7.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:456fd88fa0d14e6b385358515aef69fc89b3c2191706fd9aee62087b62aad09c"}, + {file = "chroma_hnswlib-0.7.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dfaae825499c2beaa3b75a12d7ec713b64226df72a5c4097203e3ed532680da"}, + {file = "chroma_hnswlib-0.7.6-cp310-cp310-win_amd64.whl", hash = "sha256:2487201982241fb1581be26524145092c95902cb09fc2646ccfbc407de3328ec"}, + {file = "chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca"}, + {file = "chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f"}, + {file = "chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170"}, + {file = "chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9"}, + {file = "chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3"}, + {file = "chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7"}, + {file = "chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912"}, + {file = "chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4"}, + {file = "chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5"}, + {file = "chroma_hnswlib-0.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2fe6ea949047beed19a94b33f41fe882a691e58b70c55fdaa90274ae78be046f"}, + {file = "chroma_hnswlib-0.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:feceff971e2a2728c9ddd862a9dd6eb9f638377ad98438876c9aeac96c9482f5"}, + {file = "chroma_hnswlib-0.7.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb0633b60e00a2b92314d0bf5bbc0da3d3320be72c7e3f4a9b19f4609dc2b2ab"}, + {file = "chroma_hnswlib-0.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a566abe32fab42291f766d667bdbfa234a7f457dcbd2ba19948b7a978c8ca624"}, + {file = "chroma_hnswlib-0.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6be47853d9a58dedcfa90fc846af202b071f028bbafe1d8711bf64fe5a7f6111"}, + {file = "chroma_hnswlib-0.7.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a7af35bdd39a88bffa49f9bb4bf4f9040b684514a024435a1ef5cdff980579d"}, + {file = "chroma_hnswlib-0.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a53b1f1551f2b5ad94eb610207bde1bb476245fc5097a2bec2b476c653c58bde"}, + {file = "chroma_hnswlib-0.7.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3085402958dbdc9ff5626ae58d696948e715aef88c86d1e3f9285a88f1afd3bc"}, + {file = "chroma_hnswlib-0.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:77326f658a15adfb806a16543f7db7c45f06fd787d699e643642d6bde8ed49c4"}, + {file = "chroma_hnswlib-0.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:93b056ab4e25adab861dfef21e1d2a2756b18be5bc9c292aa252fa12bb44e6ae"}, + {file = "chroma_hnswlib-0.7.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fe91f018b30452c16c811fd6c8ede01f84e5a9f3c23e0758775e57f1c3778871"}, + {file = "chroma_hnswlib-0.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c0e627476f0f4d9e153420d36042dd9c6c3671cfd1fe511c0253e38c2a1039"}, + {file = "chroma_hnswlib-0.7.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e9796a4536b7de6c6d76a792ba03e08f5aaa53e97e052709568e50b4d20c04f"}, + {file = "chroma_hnswlib-0.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:d30e2db08e7ffdcc415bd072883a322de5995eb6ec28a8f8c054103bbd3ec1e0"}, + {file = "chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7"}, +] + +[package.dependencies] +numpy = "*" + +[[package]] +name = "chromadb" +version = "0.6.3" +description = "Chroma." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "chromadb-0.6.3-py3-none-any.whl", hash = "sha256:4851258489a3612b558488d98d09ae0fe0a28d5cad6bd1ba64b96fdc419dc0e5"}, + {file = "chromadb-0.6.3.tar.gz", hash = "sha256:c8f34c0b704b9108b04491480a36d42e894a960429f87c6516027b5481d59ed3"}, +] + +[package.dependencies] +bcrypt = ">=4.0.1" +build = ">=1.0.3" +chroma-hnswlib = "0.7.6" +fastapi = ">=0.95.2" +grpcio = ">=1.58.0" +httpx = ">=0.27.0" +importlib-resources = "*" +kubernetes = ">=28.1.0" +mmh3 = ">=4.0.1" +numpy = ">=1.22.5" +onnxruntime = ">=1.14.1" +opentelemetry-api = ">=1.2.0" +opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" +opentelemetry-instrumentation-fastapi = ">=0.41b0" +opentelemetry-sdk = ">=1.2.0" +orjson = ">=3.9.12" +overrides = ">=7.3.1" +posthog = ">=2.4.0" +pydantic = ">=1.9" +pypika = ">=0.48.9" +PyYAML = ">=6.0.0" +rich = ">=10.11.0" +tenacity = ">=8.2.3" +tokenizers = ">=0.13.2" +tqdm = ">=4.65.0" +typer = ">=0.9.0" +typing_extensions = ">=4.5.0" +uvicorn = {version = ">=0.18.3", extras = ["standard"]} + [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -220,7 +539,25 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\" or os_name == \"nt\"", dev = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} + +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer (>=2.4)"] [[package]] name = "coverage" @@ -308,6 +645,24 @@ packaging = ">=17.0" pandas = ">=0.24.2" pyarrow = ">=3.0.0" +[[package]] +name = "deprecated" +version = "1.2.18" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] +files = [ + {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, + {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] + [[package]] name = "distlib" version = "0.3.8" @@ -326,7 +681,7 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -581,6 +936,30 @@ lint = ["pre-commit", "pyupgrade", "ruff", "yesqa"] python-jose = ["python-jose (==3.3.0)"] test = ["cryptography", "freezegun", "pytest", "pytest-cov", "pytest-django", "pytest-xdist", "tox"] +[[package]] +name = "docstring-parser" +version = "0.16" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +groups = ["main"] +files = [ + {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, + {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, +] + +[[package]] +name = "durationpy" +version = "0.9" +description = "Module for converting between datetime.timedelta and Go's Duration strings." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, +] + [[package]] name = "elasticsearch" version = "7.17.9" @@ -609,7 +988,7 @@ version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["test"] +groups = ["main", "test"] markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, @@ -634,13 +1013,34 @@ files = [ [package.dependencies] python-dateutil = ">=2.4" +[[package]] +name = "fastapi" +version = "0.115.12" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"}, + {file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.47.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + [[package]] name = "filelock" version = "3.13.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, @@ -651,6 +1051,58 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8) ; python_version < \"3.11\""] +[[package]] +name = "flatbuffers" +version = "25.2.10" +description = "The FlatBuffers serialization format for Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051"}, + {file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"}, +] + +[[package]] +name = "fsspec" +version = "2025.3.2" +description = "File-system specification" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fsspec-2025.3.2-py3-none-any.whl", hash = "sha256:2daf8dc3d1dfa65b6aa37748d112773a7a08416f6c70d96b264c96476ecaf711"}, + {file = "fsspec-2025.3.2.tar.gz", hash = "sha256:e52c77ef398680bbd6a98c0e628fbc469491282981209907bbc8aea76a04fdc6"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] +tqdm = ["tqdm"] + [[package]] name = "google-api-core" version = "2.17.0" @@ -760,35 +1212,92 @@ requests-oauthlib = ">=0.7.0" [package.extras] tool = ["click (>=6.0.0)"] +[[package]] +name = "google-cloud-aiplatform" +version = "1.92.0" +description = "Vertex AI API client library" +optional = false +python-versions = ">=3.9" 
+groups = ["main"] +files = [ + {file = "google_cloud_aiplatform-1.92.0-py2.py3-none-any.whl", hash = "sha256:ced3e6aca90fadb6f224eb0ee71db41b38723efe26cf46266f65601079e3b2f3"}, + {file = "google_cloud_aiplatform-1.92.0.tar.gz", hash = "sha256:54e6f4ef74566d0e8d8a48e1cbe3ca46017e7a4030689d1cb1561115297630fd"}, +] + +[package.dependencies] +docstring-parser = "<1" +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0" +google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0" +google-cloud-resource-manager = ">=1.3.3,<3.0.0" +google-cloud-storage = ">=1.32.0,<3.0.0" +google-genai = ">=1.0.0,<2.0.0" +packaging = ">=14.3" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +pydantic = "<3" +shapely = "<3.0.0" +typing-extensions = "*" + +[package.extras] +adk = ["google-adk (>=0.0.2)"] +ag2 = ["ag2[gemini]", "openinference-instrumentation-autogen (>=0.1.6,<0.2)"] +ag2-testing = ["absl-py", "ag2[gemini]", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "openinference-instrumentation-autogen (>=0.1.6,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.11.1,<3)", "pytest-xdist", "typing-extensions"] +agent-engines = ["cloudpickle (>=3.0,<4.0)", "google-cloud-logging (<4)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "packaging (>=24.0)", "pydantic (>=2.11.1,<3)", "typing-extensions"] +autologging = ["mlflow (>=1.27.0,<=2.16.0)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.4.0,<3.0.0)", "werkzeug (>=2.0.0,<4.0.0)"] +datasets = ["pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0.0) ; python_version < \"3.11\""] +endpoint = ["requests (>=2.28.1)", "requests-toolbelt (<=1.0.0)"] +evaluation = ["jsonschema", "pandas (>=1.0.0)", "ruamel.yaml", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "tqdm (>=4.23.0)"] +full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<=0.28.1)", "immutabledict", "jsonschema", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0.0) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || ==2.33.* || >=2.42.dev0,<=2.42.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.42.0) ; python_version == \"3.11\"", "requests (>=2.28.1)", "requests-toolbelt (<=1.0.0)", "ruamel.yaml", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0)", "tensorflow (>=2.3.0,<3.0.0) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<4.0.0)"] +langchain = ["langchain (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-google-vertexai (>=2,<3)", "langgraph (>=0.2.45,<0.4)", 
"openinference-instrumentation-langchain (>=0.1.19,<0.2)"] +langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-google-vertexai (>=2,<3)", "langgraph (>=0.2.45,<0.4)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.11.1,<3)", "pytest-xdist", "typing-extensions"] +lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0)"] +llama-index = ["llama-index", "llama-index-llms-google-genai", "openinference-instrumentation-llama-index (>=3.0,<4.0)"] +llama-index-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "llama-index", "llama-index-llms-google-genai", "openinference-instrumentation-llama-index (>=3.0,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.11.1,<3)", "pytest-xdist", "typing-extensions"] +metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] +pipelines = ["pyyaml (>=5.3.1,<7)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.114.0)", "httpx (>=0.23.0,<=0.28.1)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || ==2.33.* || >=2.42.dev0,<=2.42.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.42.0) ; python_version == \"3.11\"", "setuptools (<70.0.0)"] +ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || ==2.33.* || >=2.42.dev0,<=2.42.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.42.0) ; python_version == \"3.11\"", "ray[train]", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.11.1,<3)", "typing-extensions"] +tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0)", "werkzeug (>=2.0.0,<4.0.0)"] +testing = ["aiohttp", "bigframes ; python_version >= \"3.10\"", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<=0.28.1)", "immutabledict", "ipython", "jsonschema", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "protobuf (<=5.29.4)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0.0) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || ==2.33.* || >=2.42.dev0,<=2.42.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.42.0) ; python_version == \"3.11\"", "requests (>=2.28.1)", "requests-toolbelt (<=1.0.0)", "ruamel.yaml", "scikit-learn 
(<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.14.1) ; python_version <= \"3.11\"", "tensorflow (==2.19.0) ; python_version > \"3.11\"", "tensorflow (>=2.3.0,<3.0.0)", "tensorflow (>=2.3.0,<3.0.0) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0)", "torch (>=2.0.0,<2.1.0) ; python_version <= \"3.11\"", "torch (>=2.2.0) ; python_version > \"3.11\"", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<4.0.0)", "xgboost"] +tokenization = ["sentencepiece (>=0.2.0)"] +vizier = ["google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0)"] + [[package]] name = "google-cloud-bigquery" -version = "3.17.2" +version = "3.30.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google-cloud-bigquery-3.17.2.tar.gz", hash = "sha256:6e1cf669a40e567ab3289c7b5f2056363da9fcb85d9a4736ee90240d4a7d84ea"}, - {file = "google_cloud_bigquery-3.17.2-py2.py3-none-any.whl", hash = "sha256:cdadf5283dca55a1a350bacf8c8a7466169d3cf46c5a0a3abc5e9aa0b0a51dee"}, + {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, + {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" +google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-core = ">=2.4.1,<3.0.0dev" +google-resumable-media = ">=2.0.0,<3.0dev" packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" +python-dateutil = ">=2.7.3,<3.0dev" requests = ">=2.21.0,<3.0.0dev" [package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] +all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,opentelemetry,pandas,tqdm]"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"] +ipython = 
["bigquery-magics (>=0.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1) ; python_version >= \"3.8\"", "pyarrow (>=3.0.0)"] tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] @@ -835,28 +1344,48 @@ google-auth = ">=1.25.0,<3.0dev" [package.extras] grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] +[[package]] +name = "google-cloud-resource-manager" +version = "1.14.2" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_cloud_resource_manager-1.14.2-py3-none-any.whl", hash = "sha256:d0fa954dedd1d2b8e13feae9099c01b8aac515b648e612834f9942d2795a9900"}, + {file = "google_cloud_resource_manager-1.14.2.tar.gz", hash = "sha256:962e2d904c550d7bac48372607904ff7bb3277e3bb4a36d80cc9a37e28e6eb74"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +grpc-google-iam-v1 = ">=0.14.0,<1.0.0" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + [[package]] name = "google-cloud-storage" -version = "2.14.0" +version = "2.18.1" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, - {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, + {file = "google_cloud_storage-2.18.1-py2.py3-none-any.whl", hash = "sha256:9d8db6bde3a979cca7150511cd0e4cb363e5f69d31259d890ba1124fa109418c"}, + {file = "google_cloud_storage-2.18.1.tar.gz", hash = "sha256:6707a6f30a05aee36faca81296419ca2907ac750af1c0457f278bc9a6fb219ad"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=2.23.3,<3.0dev" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" [package.extras] -protobuf = ["protobuf (<5.0.0dev)"] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] [[package]] name = "google-crc32c" @@ -939,6 +1468,27 @@ files = [ [package.extras] testing = ["pytest"] +[[package]] +name = "google-genai" +version = "1.14.0" +description = "GenAI Python SDK" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "google_genai-1.14.0-py3-none-any.whl", hash = "sha256:5916ee985bf69ac7b68c4488949225db71e21579afc7ba5ecd5321173b60d3b2"}, + {file = "google_genai-1.14.0.tar.gz", hash = "sha256:7c608de5bb173486a546f5ec4562255c26bae72d33d758a3207bb26f695d0087"}, +] + +[package.dependencies] +anyio = 
">=4.8.0,<5.0.0" +google-auth = ">=2.14.1,<3.0.0" +httpx = ">=0.28.1,<1.0.0" +pydantic = ">=2.0.0,<3.0.0" +requests = ">=2.28.1,<3.0.0" +typing-extensions = ">=4.11.0,<5.0.0" +websockets = ">=13.0.0,<15.1.0" + [[package]] name = "google-resumable-media" version = "2.7.0" @@ -971,6 +1521,7 @@ files = [ ] [package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" [package.extras] @@ -1070,72 +1621,155 @@ files = [ [package.dependencies] graphql-core = ">=3.2,<3.3" +[[package]] +name = "greenlet" +version = "3.2.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +files = [ + {file = "greenlet-3.2.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13"}, + {file = "greenlet-3.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5"}, + {file = "greenlet-3.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057"}, + {file = "greenlet-3.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f"}, + {file = "greenlet-3.2.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe"}, + {file = "greenlet-3.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e"}, + {file = "greenlet-3.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6"}, + {file = "greenlet-3.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b"}, + {file = "greenlet-3.2.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3"}, + {file = "greenlet-3.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e"}, + {file = "greenlet-3.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5"}, + {file = "greenlet-3.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec"}, + {file = "greenlet-3.2.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d"}, + {file = "greenlet-3.2.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf"}, + {file = "greenlet-3.2.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708"}, + {file = "greenlet-3.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207"}, + {file = "greenlet-3.2.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8"}, + {file = "greenlet-3.2.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51"}, + {file = "greenlet-3.2.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240"}, + {file = "greenlet-3.2.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc"}, + {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457"}, + {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659"}, + {file = "greenlet-3.2.2-cp39-cp39-win32.whl", hash = "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61"}, + {file = "greenlet-3.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.2" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351"}, + {file = "grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + 
[[package]] name = "grpcio" -version = "1.60.1" +version = "1.71.0" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, - {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"}, - {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"}, - {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"}, - {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"}, - {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"}, - {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"}, - {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"}, - {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"}, - {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"}, - {file = 
"grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"}, - {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"}, - {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"}, - {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"}, - {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"}, - {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"}, - {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"}, - {file = "grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"}, - {file = "grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"}, - {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"}, - {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"}, - {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"}, - {file = 
"grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"}, - {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"}, - {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"}, - {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.60.1)"] + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.71.0)"] [[package]] name = "grpcio-status" @@ -1175,6 +1809,62 @@ gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] tornado = ["tornado (>=0.2)"] +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "hf-xet" +version = "1.1.0" +description = "" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\"" +files = [ + {file = "hf_xet-1.1.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:0322c42551e275fcb7949c083a54a81b2898e50787c9aa74284fcb8d2c58c12c"}, + {file = "hf_xet-1.1.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:667153a0304ac2debf2af95a8ff7687186f885b493f4cd16344869af270cd110"}, + {file = "hf_xet-1.1.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:995eeffb119636ea617b96c7d7bf3c3f5ea8727fa57974574e25d700b8532d48"}, + {file = "hf_xet-1.1.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3aee847da362393331f515c4010d0aaa1c2669acfcca1f4b28946d6949cc0086"}, + {file = "hf_xet-1.1.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68c5813a6074aa36e12ef5983230e3b03148cce61e0fcdd294096493795565b4"}, + {file = "hf_xet-1.1.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4ee9222bf9274b1c198b88a929de0b5a49349c4962d89c5b3b2f0f7f47d9761c"}, + {file = "hf_xet-1.1.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:73153eab9abf3d6973b21e94a67ccba5d595c3e12feb8c0bf50be02964e7f126"}, + {file = "hf_xet-1.1.0.tar.gz", hash = "sha256:a7c2a4c2b6eee9ce0a1a367a82b60d95ba634420ef1c250addad7aa4af419cf4"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + [[package]] name = "httplib2" version = "0.22.0" @@ -1191,52 +1881,237 @@ files = [ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} [[package]] -name = "huey" -version = "2.5.0" -description = "huey, a little task queue" +name = "httptools" +version = "0.6.4" +description = "A collection of framework independent HTTP protocol utils." optional = false -python-versions = "*" +python-versions = ">=3.8.0" groups = ["main"] files = [ - {file = "huey-2.5.0.tar.gz", hash = "sha256:2ffb52fb5c46a1b0d53c79d59df3622312b27e2ab68d81a580985a8ea4ca3480"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + 
{file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] -backends = ["redis (>=3.0.0)"] -redis = ["redis (>=3.0.0)"] +test = ["Cython (>=0.29.24)"] [[package]] -name = "identify" -version = "2.5.34" -description = "File identification library for Python" +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main"] files = [ - {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, - {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + [package.extras] -license = ["ukkonen"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" +name = "httpx-sse" +version = "0.4.0" +description = "Consume Server-Sent Event (SSE) messages with HTTPX." 
optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, + {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, ] [[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false +name = "huey" +version = "2.5.0" +description = "huey, a little task queue" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "huey-2.5.0.tar.gz", hash = "sha256:2ffb52fb5c46a1b0d53c79d59df3622312b27e2ab68d81a580985a8ea4ca3480"}, +] + +[package.extras] +backends = ["redis (>=3.0.0)"] +redis = ["redis (>=3.0.0)"] + +[[package]] +name = "huggingface-hub" +version = "0.31.1" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "huggingface_hub-0.31.1-py3-none-any.whl", hash = "sha256:43f73124819b48b42d140cbc0d7a2e6bd15b2853b1b9d728d4d55ad1750cac5b"}, + {file = "huggingface_hub-0.31.1.tar.gz", hash = "sha256:492bb5f545337aa9e2f59b75ef4c5f535a371e8958a6ce90af056387e67f1180"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +hf-xet = {version = ">=1.1.0,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""} +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +hf-xet = ["hf-xet (>=1.1.0,<2.0.0)"] +inference = ["aiohttp"] +quality = ["libcst (==1.4.0)", "mypy (==1.5.1)", "ruff (>=0.9.0)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = 
["safetensors[torch]", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "identify" +version = "2.5.34" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, + {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.4.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] + +[[package]] +name = "importlib-resources" +version = "6.5.2" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"}, + {file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = 
["pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false python-versions = ">=3.7" groups = ["test"] files = [ @@ -1244,6 +2119,400 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jiter" +version = "0.9.0" +description = "Fast iterable JSON parser." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jiter-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:816ec9b60fdfd1fec87da1d7ed46c66c44ffec37ab2ef7de5b147b2fce3fd5ad"}, + {file = "jiter-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b1d3086f8a3ee0194ecf2008cf81286a5c3e540d977fa038ff23576c023c0ea"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1339f839b91ae30b37c409bf16ccd3dc453e8b8c3ed4bd1d6a567193651a4a51"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffba79584b3b670fefae66ceb3a28822365d25b7bf811e030609a3d5b876f538"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cfc7d0a8e899089d11f065e289cb5b2daf3d82fbe028f49b20d7b809193958d"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e00a1a2bbfaaf237e13c3d1592356eab3e9015d7efd59359ac8b51eb56390a12"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1d9870561eb26b11448854dce0ff27a9a27cb616b632468cafc938de25e9e51"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9872aeff3f21e437651df378cb75aeb7043e5297261222b6441a620218b58708"}, + {file = "jiter-0.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1fd19112d1049bdd47f17bfbb44a2c0001061312dcf0e72765bfa8abd4aa30e5"}, + {file = "jiter-0.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ef5da104664e526836070e4a23b5f68dec1cc673b60bf1edb1bfbe8a55d0678"}, + {file = "jiter-0.9.0-cp310-cp310-win32.whl", hash = "sha256:cb12e6d65ebbefe5518de819f3eda53b73187b7089040b2d17f5b39001ff31c4"}, + {file = "jiter-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:c43ca669493626d8672be3b645dbb406ef25af3f4b6384cfd306da7eb2e70322"}, + {file = "jiter-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6c4d99c71508912a7e556d631768dcdef43648a93660670986916b297f1c54af"}, + {file = "jiter-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f60fb8ce7df529812bf6c625635a19d27f30806885139e367af93f6e734ef58"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51c4e1a4f8ea84d98b7b98912aa4290ac3d1eabfde8e3c34541fae30e9d1f08b"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f4c677c424dc76684fea3e7285a7a2a7493424bea89ac441045e6a1fb1d7b3b"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2221176dfec87f3470b21e6abca056e6b04ce9bff72315cb0b243ca9e835a4b5"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c7adb66f899ffa25e3c92bfcb593391ee1947dbdd6a9a970e0d7e713237d572"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98d27330fdfb77913c1097a7aab07f38ff2259048949f499c9901700789ac15"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:eda3f8cc74df66892b1d06b5d41a71670c22d95a1ca2cbab73654745ce9d0419"}, + {file = "jiter-0.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dd5ab5ddc11418dce28343123644a100f487eaccf1de27a459ab36d6cca31043"}, + {file = "jiter-0.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42f8a68a69f047b310319ef8e2f52fdb2e7976fb3313ef27df495cf77bcad965"}, + {file = "jiter-0.9.0-cp311-cp311-win32.whl", hash = "sha256:a25519efb78a42254d59326ee417d6f5161b06f5da827d94cf521fed961b1ff2"}, + {file = "jiter-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:923b54afdd697dfd00d368b7ccad008cccfeb1efb4e621f32860c75e9f25edbd"}, + {file = "jiter-0.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7b46249cfd6c48da28f89eb0be3f52d6fdb40ab88e2c66804f546674e539ec11"}, + {file = "jiter-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:609cf3c78852f1189894383cf0b0b977665f54cb38788e3e6b941fa6d982c00e"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d726a3890a54561e55a9c5faea1f7655eda7f105bd165067575ace6e65f80bb2"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e89dc075c1fef8fa9be219e249f14040270dbc507df4215c324a1839522ea75"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e8ffa3c353b1bc4134f96f167a2082494351e42888dfcf06e944f2729cbe1d"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:203f28a72a05ae0e129b3ed1f75f56bc419d5f91dfacd057519a8bd137b00c42"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fca1a02ad60ec30bb230f65bc01f611c8608b02d269f998bc29cca8619a919dc"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:237e5cee4d5d2659aaf91bbf8ec45052cc217d9446070699441a91b386ae27dc"}, + {file = "jiter-0.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:528b6b71745e7326eed73c53d4aa57e2a522242320b6f7d65b9c5af83cf49b6e"}, + {file = "jiter-0.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9f48e86b57bc711eb5acdfd12b6cb580a59cc9a993f6e7dcb6d8b50522dcd50d"}, + {file = "jiter-0.9.0-cp312-cp312-win32.whl", hash = "sha256:699edfde481e191d81f9cf6d2211debbfe4bd92f06410e7637dffb8dd5dfde06"}, + {file = "jiter-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:099500d07b43f61d8bd780466d429c45a7b25411b334c60ca875fa775f68ccb0"}, + {file = "jiter-0.9.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2764891d3f3e8b18dce2cff24949153ee30c9239da7c00f032511091ba688ff7"}, + {file = "jiter-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:387b22fbfd7a62418d5212b4638026d01723761c75c1c8232a8b8c37c2f1003b"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d8da8629ccae3606c61d9184970423655fb4e33d03330bcdfe52d234d32f69"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1be73d8982bdc278b7b9377426a4b44ceb5c7952073dd7488e4ae96b88e1103"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2228eaaaa111ec54b9e89f7481bffb3972e9059301a878d085b2b449fbbde635"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11509bfecbc319459647d4ac3fd391d26fdf530dad00c13c4dadabf5b81f01a4"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3f22238da568be8bbd8e0650e12feeb2cfea15eda4f9fc271d3b362a4fa0604d"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17f5d55eb856597607562257c8e36c42bc87f16bef52ef7129b7da11afc779f3"}, + {file = "jiter-0.9.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6a99bed9fbb02f5bed416d137944419a69aa4c423e44189bc49718859ea83bc5"}, + {file = "jiter-0.9.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e057adb0cd1bd39606100be0eafe742de2de88c79df632955b9ab53a086b3c8d"}, + {file = "jiter-0.9.0-cp313-cp313-win32.whl", hash = "sha256:f7e6850991f3940f62d387ccfa54d1a92bd4bb9f89690b53aea36b4364bcab53"}, + {file = "jiter-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:c8ae3bf27cd1ac5e6e8b7a27487bf3ab5f82318211ec2e1346a5b058756361f7"}, + {file = "jiter-0.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0b2827fb88dda2cbecbbc3e596ef08d69bda06c6f57930aec8e79505dc17001"}, + {file = "jiter-0.9.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062b756ceb1d40b0b28f326cba26cfd575a4918415b036464a52f08632731e5a"}, + {file = "jiter-0.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6f7838bc467ab7e8ef9f387bd6de195c43bad82a569c1699cb822f6609dd4cdf"}, + {file = "jiter-0.9.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4a2d16360d0642cd68236f931b85fe50288834c383492e4279d9f1792e309571"}, + {file = "jiter-0.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e84ed1c9c9ec10bbb8c37f450077cbe3c0d4e8c2b19f0a49a60ac7ace73c7452"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f3c848209ccd1bfa344a1240763975ca917de753c7875c77ec3034f4151d06c"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7825f46e50646bee937e0f849d14ef3a417910966136f59cd1eb848b8b5bb3e4"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d82a811928b26d1a6311a886b2566f68ccf2b23cf3bfed042e18686f1f22c2d7"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c058ecb51763a67f019ae423b1cbe3fa90f7ee6280c31a1baa6ccc0c0e2d06e"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9897115ad716c48f0120c1f0c4efae348ec47037319a6c63b2d7838bb53aaef4"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:351f4c90a24c4fb8c87c6a73af2944c440494ed2bea2094feecacb75c50398ae"}, + {file = "jiter-0.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d45807b0f236c485e1e525e2ce3a854807dfe28ccf0d013dd4a563395e28008a"}, + {file = "jiter-0.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1537a890724ba00fdba21787010ac6f24dad47f763410e9e1093277913592784"}, + {file = "jiter-0.9.0-cp38-cp38-win32.whl", hash = "sha256:e3630ec20cbeaddd4b65513fa3857e1b7c4190d4481ef07fb63d0fad59033321"}, + {file = "jiter-0.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:2685f44bf80e95f8910553bf2d33b9c87bf25fceae6e9f0c1355f75d2922b0ee"}, + {file = "jiter-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9ef340fae98065071ccd5805fe81c99c8f80484e820e40043689cf97fb66b3e2"}, + {file = "jiter-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:efb767d92c63b2cd9ec9f24feeb48f49574a713870ec87e9ba0c2c6e9329c3e2"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:113f30f87fb1f412510c6d7ed13e91422cfd329436364a690c34c8b8bd880c42"}, + {file = 
"jiter-0.9.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8793b6df019b988526f5a633fdc7456ea75e4a79bd8396a3373c371fc59f5c9b"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9aaa5102dba4e079bb728076fadd5a2dca94c05c04ce68004cfd96f128ea34"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d838650f6ebaf4ccadfb04522463e74a4c378d7e667e0eb1865cfe3990bfac49"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0194f813efdf4b8865ad5f5c5f50f8566df7d770a82c51ef593d09e0b347020"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7954a401d0a8a0b8bc669199db78af435aae1e3569187c2939c477c53cb6a0a"}, + {file = "jiter-0.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4feafe787eb8a8d98168ab15637ca2577f6ddf77ac6c8c66242c2d028aa5420e"}, + {file = "jiter-0.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:27cd1f2e8bb377f31d3190b34e4328d280325ad7ef55c6ac9abde72f79e84d2e"}, + {file = "jiter-0.9.0-cp39-cp39-win32.whl", hash = "sha256:161d461dcbe658cf0bd0aa375b30a968b087cdddc624fc585f3867c63c6eca95"}, + {file = "jiter-0.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e8b36d8a16a61993be33e75126ad3d8aa29cf450b09576f3c427d27647fcb4aa"}, + {file = "jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +groups = ["main"] +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + +[[package]] +name = "kubernetes" +version = "32.0.1" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998"}, + {file = "kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +durationpy = ">=0.7" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "langchain" +version = "0.3.20" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "langchain-0.3.20-py3-none-any.whl", hash = 
"sha256:273287f8e61ffdf7e811cf8799e6a71e9381325b8625fd6618900faba79cfdd0"}, + {file = "langchain-0.3.20.tar.gz", hash = "sha256:edcc3241703e1f6557ef5a5c35cd56f9ccc25ff12e38b4829c66d94971737a93"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +langchain-core = ">=0.3.41,<1.0.0" +langchain-text-splitters = ">=0.3.6,<1.0.0" +langsmith = ">=0.1.17,<0.4" +pydantic = ">=2.7.4,<3.0.0" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" + +[package.extras] +anthropic = ["langchain-anthropic"] +aws = ["langchain-aws"] +cohere = ["langchain-cohere"] +community = ["langchain-community"] +deepseek = ["langchain-deepseek"] +fireworks = ["langchain-fireworks"] +google-genai = ["langchain-google-genai"] +google-vertexai = ["langchain-google-vertexai"] +groq = ["langchain-groq"] +huggingface = ["langchain-huggingface"] +mistralai = ["langchain-mistralai"] +ollama = ["langchain-ollama"] +openai = ["langchain-openai"] +together = ["langchain-together"] +xai = ["langchain-xai"] + +[[package]] +name = "langchain-chroma" +version = "0.2.2" +description = "An integration package connecting Chroma and LangChain" +optional = false +python-versions = "<4,>=3.9" +groups = ["main"] +files = [ + {file = "langchain_chroma-0.2.2-py3-none-any.whl", hash = "sha256:7766335f16975c2059bb6e8ea75a59a4082c52e6c9d66827681d1bce2c2756a2"}, + {file = "langchain_chroma-0.2.2.tar.gz", hash = "sha256:11225ca6077b2bf919b84d74e4d343121e077c0fa3274db1929a270fef9d1002"}, +] + +[package.dependencies] +chromadb = ">=0.4.0,<0.5.4 || >0.5.4,<0.5.5 || >0.5.5,<0.5.7 || >0.5.7,<0.5.9 || >0.5.9,<0.5.10 || >0.5.10,<0.5.11 || >0.5.11,<0.5.12 || >0.5.12,<0.7.0" +langchain-core = ">=0.2.43,<0.3.0 || >0.3.0,<0.3.1 || >0.3.1,<0.3.2 || >0.3.2,<0.3.3 || >0.3.3,<0.3.4 || >0.3.4,<0.3.5 || >0.3.5,<0.3.6 || >0.3.6,<0.3.7 || >0.3.7,<0.3.8 || >0.3.8,<0.3.9 || >0.3.9,<0.3.10 || >0.3.10,<0.3.11 || >0.3.11,<0.3.12 || >0.3.12,<0.3.13 || >0.3.13,<0.3.14 || >0.3.14,<0.4.0" +numpy = [ + {version = ">=1.22.4,<2.0.0", markers = "python_version < \"3.12\""}, + {version = ">=1.26.2,<2.0.0", markers = "python_version >= \"3.12\""}, +] + +[[package]] +name = "langchain-core" +version = "0.3.59" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "langchain_core-0.3.59-py3-none-any.whl", hash = "sha256:9686baaff43f2c8175535da13faf40e6866769015e93130c3c1e4243e7244d70"}, + {file = "langchain_core-0.3.59.tar.gz", hash = "sha256:052a37cf298c505144f007e5aeede6ecff2dc92c827525d1ef59101eb3a4551c"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.125,<0.4" +packaging = ">=23.2,<25" +pydantic = [ + {version = ">=2.5.2,<3.0.0", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, +] +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" +typing-extensions = ">=4.7" + +[[package]] +name = "langchain-google-vertexai" +version = "2.0.15" +description = "An integration package connecting Google VertexAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "langchain_google_vertexai-2.0.15-py3-none-any.whl", hash = "sha256:994d6ab6430ce4d13541f4ca363c1f26b5ec53ad7fef29a0e72f98d3335eb64f"}, + {file = "langchain_google_vertexai-2.0.15.tar.gz", hash = "sha256:92ccdb02bab323be6f11a3dfd68de07b0a3160e0019f114d954ce648fab84e98"}, +] + 
+[package.dependencies] +google-cloud-aiplatform = ">=1.81.0,<2.0.0" +google-cloud-storage = ">=2.18.0,<3.0.0" +httpx = ">=0.28.0,<0.29.0" +httpx-sse = ">=0.4.0,<0.5.0" +langchain-core = ">=0.3.31,<0.4" +pydantic = ">=2.9,<3.0" + +[package.extras] +anthropic = ["anthropic[vertexai] (>=0.35.0,<1)"] +mistral = ["langchain-mistralai (>=0.2.0,<1)"] + +[[package]] +name = "langchain-openai" +version = "0.3.9" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "langchain_openai-0.3.9-py3-none-any.whl", hash = "sha256:1ad95c09a620910c39a8eb826eb146bd96bfbc55e4fca78b1e28ffd5e4f5b261"}, + {file = "langchain_openai-0.3.9.tar.gz", hash = "sha256:a2897d15765a435eff3fed7043235c25ec1e192e6c45a81e9e4fae2951335fb3"}, +] + +[package.dependencies] +langchain-core = ">=0.3.45,<1.0.0" +openai = ">=1.66.3,<2.0.0" +tiktoken = ">=0.7,<1" + +[[package]] +name = "langchain-text-splitters" +version = "0.3.8" +description = "LangChain text splitting utilities" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "langchain_text_splitters-0.3.8-py3-none-any.whl", hash = "sha256:e75cc0f4ae58dcf07d9f18776400cf8ade27fadd4ff6d264df6278bb302f6f02"}, + {file = "langchain_text_splitters-0.3.8.tar.gz", hash = "sha256:116d4b9f2a22dda357d0b79e30acf005c5518177971c66a9f1ab0edfdb0f912e"}, +] + +[package.dependencies] +langchain-core = ">=0.3.51,<1.0.0" + +[[package]] +name = "langgraph" +version = "0.3.11" +description = "Building stateful, multi-actor applications with LLMs" +optional = false +python-versions = "<4.0,>=3.9.0" +groups = ["main"] +files = [ + {file = "langgraph-0.3.11-py3-none-any.whl", hash = "sha256:b7b5b8a07ed37649d6b928b46462a06911b5bc89b5ee329d53a41ad2f33d56a8"}, + {file = "langgraph-0.3.11.tar.gz", hash = "sha256:bcbc78e7b4b584c858f42362734d2190802a7c75966faf1f603057f296f358bc"}, +] + +[package.dependencies] +langchain-core = ">=0.1,<0.4" +langgraph-checkpoint = ">=2.0.10,<3.0.0" +langgraph-prebuilt = ">=0.1.1,<0.2" +langgraph-sdk = ">=0.1.42,<0.2.0" + +[[package]] +name = "langgraph-checkpoint" +version = "2.0.25" +description = "Library with base interfaces for LangGraph checkpoint savers." +optional = false +python-versions = "<4.0.0,>=3.9.0" +groups = ["main"] +files = [ + {file = "langgraph_checkpoint-2.0.25-py3-none-any.whl", hash = "sha256:23416a0f5bc9dd712ac10918fc13e8c9c4530c419d2985a441df71a38fc81602"}, + {file = "langgraph_checkpoint-2.0.25.tar.gz", hash = "sha256:77a63cab7b5f84dec1d49db561326ec28bdd48bcefb7fe4ac372069d2609287b"}, +] + +[package.dependencies] +langchain-core = ">=0.2.38,<0.4" +ormsgpack = ">=1.8.0,<2.0.0" + +[[package]] +name = "langgraph-checkpoint-postgres" +version = "2.0.17" +description = "Library with a Postgres implementation of LangGraph checkpoint saver." 
+optional = false +python-versions = "<4.0.0,>=3.9.0" +groups = ["main"] +files = [ + {file = "langgraph_checkpoint_postgres-2.0.17-py3-none-any.whl", hash = "sha256:b5b8a385b129d0395f19cb01cbe050c585eca951e995a984285d39fb8dab3e65"}, + {file = "langgraph_checkpoint_postgres-2.0.17.tar.gz", hash = "sha256:ab88e8c003833c68e1d5a2f9623535697fcd536507e2423d1ec1b27393ad41d6"}, +] + +[package.dependencies] +langgraph-checkpoint = ">=2.0.15,<3.0.0" +orjson = ">=3.10.1" +psycopg = ">=3.2.0,<4.0.0" +psycopg-pool = ">=3.2.0,<4.0.0" + +[[package]] +name = "langgraph-prebuilt" +version = "0.1.8" +description = "Library with high-level APIs for creating and executing LangGraph agents and tools." +optional = false +python-versions = "<4.0.0,>=3.9.0" +groups = ["main"] +files = [ + {file = "langgraph_prebuilt-0.1.8-py3-none-any.whl", hash = "sha256:ae97b828ae00be2cefec503423aa782e1bff165e9b94592e224da132f2526968"}, + {file = "langgraph_prebuilt-0.1.8.tar.gz", hash = "sha256:4de7659151829b2b955b6798df6800e580e617782c15c2c5b29b139697491831"}, +] + +[package.dependencies] +langchain-core = ">=0.2.43,<0.3.0 || >0.3.0,<0.3.1 || >0.3.1,<0.3.2 || >0.3.2,<0.3.3 || >0.3.3,<0.3.4 || >0.3.4,<0.3.5 || >0.3.5,<0.3.6 || >0.3.6,<0.3.7 || >0.3.7,<0.3.8 || >0.3.8,<0.3.9 || >0.3.9,<0.3.10 || >0.3.10,<0.3.11 || >0.3.11,<0.3.12 || >0.3.12,<0.3.13 || >0.3.13,<0.3.14 || >0.3.14,<0.3.15 || >0.3.15,<0.3.16 || >0.3.16,<0.3.17 || >0.3.17,<0.3.18 || >0.3.18,<0.3.19 || >0.3.19,<0.3.20 || >0.3.20,<0.3.21 || >0.3.21,<0.3.22 || >0.3.22,<0.4.0" +langgraph-checkpoint = ">=2.0.10,<3.0.0" + +[[package]] +name = "langgraph-sdk" +version = "0.1.66" +description = "SDK for interacting with LangGraph API" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "langgraph_sdk-0.1.66-py3-none-any.whl", hash = "sha256:f781c63f3e913d3d6bedb02cb84d775cda64e3cdf3282fd387bdd8faaf53c603"}, + {file = "langgraph_sdk-0.1.66.tar.gz", hash = "sha256:81474ad4555a06004cc7a2f4ab477135d5eaf7db11fbcf2a69257fb2d717582e"}, +] + +[package.dependencies] +httpx = ">=0.25.2" +orjson = ">=3.10.1" + +[[package]] +name = "langsmith" +version = "0.3.42" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "langsmith-0.3.42-py3-none-any.whl", hash = "sha256:18114327f3364385dae4026ebfd57d1c1cb46d8f80931098f0f10abe533475ff"}, + {file = "langsmith-0.3.42.tar.gz", hash = "sha256:2b5cbc450ab808b992362aac6943bb1d285579aa68a3a8be901d30a393458f25"}, +] + +[package.dependencies] +httpx = ">=0.23.0,<1" +orjson = {version = ">=3.9.14,<4.0.0", markers = "platform_python_implementation != \"PyPy\""} +packaging = ">=23.2" +pydantic = [ + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, +] +requests = ">=2,<3" +requests-toolbelt = ">=1.0.0,<2.0.0" +zstandard = ">=0.23.0,<0.24.0" + +[package.extras] +langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"] +openai-agents = ["openai-agents (>=0.0.3,<0.1)"] +otel = ["opentelemetry-api (>=1.30.0,<2.0.0)", "opentelemetry-exporter-otlp-proto-http (>=1.30.0,<2.0.0)", "opentelemetry-sdk (>=1.30.0,<2.0.0)"] +pytest = ["pytest (>=7.0.0)", "rich (>=13.9.4,<14.0.0)"] + [[package]] name = "loguru" version = "0.7.2" @@ -1280,6 +2549,160 @@ click = ">=8.0.1,<9.0.0" pydantic = ">=2.5.3,<3.0.0" toml = ">=0.10.2,<0.11.0" +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mmh3" +version = "5.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4ba8cac21e1f2d4e436ce03a82a7f87cda80378691f760e9ea55045ec480a3d"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69281c281cb01994f054d862a6bb02a2e7acfe64917795c58934b0872b9ece4"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d05ed3962312fbda2a1589b97359d2467f677166952f6bd410d8c916a55febf"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ae6a03f4cff4aa92ddd690611168856f8c33a141bd3e5a1e0a85521dc21ea0"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f983535b39795d9fb7336438faae117424c6798f763d67c6624f6caf2c4c01"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d46fdd80d4c7ecadd9faa6181e92ccc6fe91c50991c9af0e371fdf8b8a7a6150"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16e976af7365ea3b5c425124b2a7f0147eed97fdbb36d99857f173c8d8e096"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6fa97f7d1e1f74ad1565127229d510f3fd65d931fdedd707c1e15100bc9e5ebb"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4052fa4a8561bd62648e9eb993c8f3af3bdedadf3d9687aa4770d10e3709a80c"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3f0e8ae9f961037f812afe3cce7da57abf734285961fffbeff9a4c011b737732"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99297f207db967814f1f02135bb7fe7628b9eacb046134a34e1015b26b06edce"}, + {file = "mmh3-5.1.0-cp310-cp310-win32.whl", hash = "sha256:2e6c8dc3631a5e22007fbdb55e993b2dbce7985c14b25b572dd78403c2e79182"}, + {file = "mmh3-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:e4e8c7ad5a4dddcfde35fd28ef96744c1ee0f9d9570108aa5f7e77cf9cfdf0bf"}, + {file = "mmh3-5.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:45da549269883208912868a07d0364e1418d8292c4259ca11699ba1b2475bd26"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769"}, + {file = 
"mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258"}, + {file = "mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372"}, + {file = "mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759"}, + {file = "mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df"}, + {file = "mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76"}, + {file = "mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776"}, + {file = "mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d6719045cda75c3f40397fc24ab67b18e0cb8f69d3429ab4c39763c4c608dd"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19fa07d303a91f8858982c37e6939834cb11893cb3ff20e6ee6fa2a7563826a"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31b47a620d622fbde8ca1ca0435c5d25de0ac57ab507209245e918128e38e676"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f810647c22c179b6821079f7aa306d51953ac893587ee09cf1afb35adf87cb"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6128b610b577eed1e89ac7177ab0c33d06ade2aba93f5c89306032306b5f1c6"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1e550a45d2ff87a1c11b42015107f1778c93f4c6f8e731bf1b8fa770321b8cc4"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:785ae09276342f79fd8092633e2d52c0f7c44d56e8cfda8274ccc9b76612dba2"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0f4be3703a867ef976434afd3661a33884abe73ceb4ee436cac49d3b4c2aaa7b"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e513983830c4ff1f205ab97152a0050cf7164f1b4783d702256d39c637b9d107"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9135c300535c828c0bae311b659f33a31c941572eae278568d1a953c4a57b59"}, + {file = "mmh3-5.1.0-cp313-cp313-win32.whl", hash = "sha256:c65dbd12885a5598b70140d24de5839551af5a99b29f9804bb2484b29ef07692"}, + {file = "mmh3-5.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:10db7765201fc65003fa998faa067417ef6283eb5f9bba8f323c48fd9c33e91f"}, + {file = "mmh3-5.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:b22fe2e54be81f6c07dcb36b96fa250fb72effe08aa52fbb83eade6e1e2d5fd7"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:166b67749a1d8c93b06f5e90576f1ba838a65c8e79f28ffd9dfafba7c7d0a084"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adba83c7ba5cc8ea201ee1e235f8413a68e7f7b8a657d582cc6c6c9d73f2830e"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a61f434736106804eb0b1612d503c4e6eb22ba31b16e6a2f987473de4226fa55"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba9ce59816b30866093f048b3312c2204ff59806d3a02adee71ff7bd22b87554"}, + {file = 
"mmh3-5.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd51597bef1e503363b05cb579db09269e6e6c39d419486626b255048daf545b"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d51a1ed642d3fb37b8f4cab966811c52eb246c3e1740985f701ef5ad4cdd2145"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:709bfe81c53bf8a3609efcbd65c72305ade60944f66138f697eefc1a86b6e356"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e01a9b0092b6f82e861137c8e9bb9899375125b24012eb5219e61708be320032"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:27e46a2c13c9a805e03c9ec7de0ca8e096794688ab2125bdce4229daf60c4a56"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5766299c1d26f6bfd0a638e070bd17dbd98d4ccb067d64db3745bf178e700ef0"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7785205e3e4443fdcbb73766798c7647f94c2f538b90f666688f3e757546069e"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8e574fbd39afb433b3ab95683b1b4bf18313dc46456fc9daaddc2693c19ca565"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1b6727a5a20e32cbf605743749f3862abe5f5e097cbf2afc7be5aafd32a549ae"}, + {file = "mmh3-5.1.0-cp39-cp39-win32.whl", hash = "sha256:d6eaa711d4b9220fe5252032a44bf68e5dcfb7b21745a96efc9e769b0dd57ec2"}, + {file = "mmh3-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:49d444913f6c02980e5241a53fe9af2338f2043d6ce5b6f5ea7d302c52c604ac"}, + {file = "mmh3-5.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:0daaeaedd78773b70378f2413c7d6b10239a75d955d30d54f460fb25d599942d"}, + {file = "mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c"}, +] + +[package.extras] +benchmark = ["pymmh3 (==0.0.5)", "pyperf (==2.8.1)", "xxhash (==3.5.0)"] +docs = ["myst-parser (==4.0.0)", "shibuya (==2024.12.21)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)"] +lint = ["black (==24.10.0)", "clang-format (==19.1.7)", "isort (==5.13.2)", "pylint (==3.3.3)"] +plot = ["matplotlib (==3.10.0)", "pandas (==2.2.3)"] +test = ["pytest (==8.3.4)", "pytest-sugar (==1.0.0)"] +type = ["mypy (==1.14.1)"] + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] +tests = ["pytest (>=4.6)"] + [[package]] name = "nodeenv" version = "1.8.0" @@ -1358,6 +2781,387 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "onnxruntime" +version = "1.21.1" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "onnxruntime-1.21.1-cp310-cp310-macosx_13_0_universal2.whl", hash = 
"sha256:daedb5d33d8963062a25f4a3c788262074587f685a19478ef759a911b4b12c25"}, + {file = "onnxruntime-1.21.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a402f9bda0b1cc791d9cf31d23c471e8189a55369b49ef2b9d0854eb11d22c4"}, + {file = "onnxruntime-1.21.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15656a2d0126f4f66295381e39c8812a6d845ccb1bb1f7bf6dd0a46d7d602e7f"}, + {file = "onnxruntime-1.21.1-cp310-cp310-win_amd64.whl", hash = "sha256:79bbedfd1263065532967a2132fb365a27ffe5f7ed962e16fec55cca741f72aa"}, + {file = "onnxruntime-1.21.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:8bee9b5ba7b88ae7bfccb4f97bbe1b4bae801b0fb05d686b28a722cb27c89931"}, + {file = "onnxruntime-1.21.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b6a29a1767b92d543091349f5397a1c7619eaca746cd1bc47f8b4ec5a9f1a6c"}, + {file = "onnxruntime-1.21.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:982dcc04a6688e1af9e3da1d4ef2bdeb11417cf3f8dde81f8f721043c1919a4f"}, + {file = "onnxruntime-1.21.1-cp311-cp311-win_amd64.whl", hash = "sha256:2b6052c04b9125319293abb9bdcce40e806db3e097f15b82242d4cd72d81fd0c"}, + {file = "onnxruntime-1.21.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f615c05869a523a94d0a4de1f0936d0199a473cf104d630fc26174bebd5759bd"}, + {file = "onnxruntime-1.21.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79dfb1f47386c4edd115b21015354b2f05f5566c40c98606251f15a64add3cbe"}, + {file = "onnxruntime-1.21.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2742935d6610fe0f58e1995018d9db7e8239d0201d9ebbdb7964a61386b5390a"}, + {file = "onnxruntime-1.21.1-cp312-cp312-win_amd64.whl", hash = "sha256:a7afdb3fcb162f5536225e13c2b245018068964b1d0eee05303ea6823ca6785e"}, + {file = "onnxruntime-1.21.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:ed4f9771233a92edcab9f11f537702371d450fe6cd79a727b672d37b9dab0cde"}, + {file = "onnxruntime-1.21.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bc100fd1f4f95258e7d0f7068ec69dec2a47cc693f745eec9cf4561ee8d952a"}, + {file = "onnxruntime-1.21.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0fea0d2b98eecf4bebe01f7ce9a265a5d72b3050e9098063bfe65fa2b0633a8e"}, + {file = "onnxruntime-1.21.1-cp313-cp313-win_amd64.whl", hash = "sha256:da606061b9ed1b05b63a37be38c2014679a3e725903f58036ffd626df45c0e47"}, + {file = "onnxruntime-1.21.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94674315d40d521952bfc28007ce9b6728e87753e1f18d243c8cd953f25903b8"}, + {file = "onnxruntime-1.21.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c9e4571ff5b2a5d377d414bc85cd9450ba233a9a92f766493874f1093976453"}, +] + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + +[[package]] +name = "openai" +version = "1.78.0" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "openai-1.78.0-py3-none-any.whl", hash = "sha256:1ade6a48cd323ad8a7715e7e1669bb97a17e1a5b8a916644261aaef4bf284778"}, + {file = "openai-1.78.0.tar.gz", hash = "sha256:254aef4980688468e96cbddb1f348ed01d274d02c64c6c69b0334bf001fb62b3"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" 
+pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<16)"] +voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] + +[[package]] +name = "opentelemetry-api" +version = "1.27.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.4.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.27.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8"}, +] + +[package.dependencies] +opentelemetry-proto = "1.27.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.27.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.27.0" +opentelemetry-proto = "1.27.0" +opentelemetry-sdk = ">=1.27.0,<1.28.0" + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.48b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44"}, + {file = "opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.48b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d"}, + {file = "opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.48b0" +opentelemetry-semantic-conventions = "0.48b0" 
+opentelemetry-util-http = "0.48b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.48b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2"}, + {file = "opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.48b0" +opentelemetry-instrumentation-asgi = "0.48b0" +opentelemetry-semantic-conventions = "0.48b0" +opentelemetry-util-http = "0.48b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)"] + +[[package]] +name = "opentelemetry-proto" +version = "1.27.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"}, + {file = "opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.27.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, + {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"}, +] + +[package.dependencies] +opentelemetry-api = "1.27.0" +opentelemetry-semantic-conventions = "0.48b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.48b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, + {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.27.0" + +[[package]] +name = "opentelemetry-util-http" +version = "0.48b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb"}, + {file = "opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c"}, +] + +[[package]] +name = "orjson" +version = "3.10.18" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "orjson-3.10.18-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a45e5d68066b408e4bc383b6e4ef05e717c65219a9e1390abc6155a520cac402"}, + {file = 
"orjson-3.10.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be3b9b143e8b9db05368b13b04c84d37544ec85bb97237b3a923f076265ec89c"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9b0aa09745e2c9b3bf779b096fa71d1cc2d801a604ef6dd79c8b1bfef52b2f92"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53a245c104d2792e65c8d225158f2b8262749ffe64bc7755b00024757d957a13"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9495ab2611b7f8a0a8a505bcb0f0cbdb5469caafe17b0e404c3c746f9900469"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73be1cbcebadeabdbc468f82b087df435843c809cd079a565fb16f0f3b23238f"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8936ee2679e38903df158037a2f1c108129dee218975122e37847fb1d4ac68"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7115fcbc8525c74e4c2b608129bef740198e9a120ae46184dac7683191042056"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:771474ad34c66bc4d1c01f645f150048030694ea5b2709b87d3bda273ffe505d"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7c14047dbbea52886dd87169f21939af5d55143dad22d10db6a7514f058156a8"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641481b73baec8db14fdf58f8967e52dc8bda1f2aba3aa5f5c1b07ed6df50b7f"}, + {file = "orjson-3.10.18-cp310-cp310-win32.whl", hash = "sha256:607eb3ae0909d47280c1fc657c4284c34b785bae371d007595633f4b1a2bbe06"}, + {file = "orjson-3.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:8770432524ce0eca50b7efc2a9a5f486ee0113a5fbb4231526d414e6254eba92"}, + {file = "orjson-3.10.18-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e0a183ac3b8e40471e8d843105da6fbe7c070faab023be3b08188ee3f85719b8"}, + {file = "orjson-3.10.18-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5ef7c164d9174362f85238d0cd4afdeeb89d9e523e4651add6a5d458d6f7d42d"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd14c5d99cdc7bf93f22b12ec3b294931518aa019e2a147e8aa2f31fd3240f7"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b672502323b6cd133c4af6b79e3bea36bad2d16bca6c1f645903fce83909a7a"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51f8c63be6e070ec894c629186b1c0fe798662b8687f3d9fdfa5e401c6bd7679"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9478ade5313d724e0495d167083c6f3be0dd2f1c9c8a38db9a9e912cdaf947"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187aefa562300a9d382b4b4eb9694806e5848b0cedf52037bb5c228c61bb66d4"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da552683bc9da222379c7a01779bddd0ad39dd699dd6300abaf43eadee38334"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e450885f7b47a0231979d9c49b567ed1c4e9f69240804621be87c40bc9d3cf17"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5e3c9cc2ba324187cd06287ca24f65528f16dfc80add48dc99fa6c836bb3137e"}, + {file = 
"orjson-3.10.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:50ce016233ac4bfd843ac5471e232b865271d7d9d44cf9d33773bcd883ce442b"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b3ceff74a8f7ffde0b2785ca749fc4e80e4315c0fd887561144059fb1c138aa7"}, + {file = "orjson-3.10.18-cp311-cp311-win32.whl", hash = "sha256:fdba703c722bd868c04702cac4cb8c6b8ff137af2623bc0ddb3b3e6a2c8996c1"}, + {file = "orjson-3.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:c28082933c71ff4bc6ccc82a454a2bffcef6e1d7379756ca567c772e4fb3278a"}, + {file = "orjson-3.10.18-cp311-cp311-win_arm64.whl", hash = "sha256:a6c7c391beaedd3fa63206e5c2b7b554196f14debf1ec9deb54b5d279b1b46f5"}, + {file = "orjson-3.10.18-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:50c15557afb7f6d63bc6d6348e0337a880a04eaa9cd7c9d569bcb4e760a24753"}, + {file = "orjson-3.10.18-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:356b076f1662c9813d5fa56db7d63ccceef4c271b1fb3dd522aca291375fcf17"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:559eb40a70a7494cd5beab2d73657262a74a2c59aff2068fdba8f0424ec5b39d"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3c29eb9a81e2fbc6fd7ddcfba3e101ba92eaff455b8d602bf7511088bbc0eae"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6612787e5b0756a171c7d81ba245ef63a3533a637c335aa7fcb8e665f4a0966f"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ac6bd7be0dcab5b702c9d43d25e70eb456dfd2e119d512447468f6405b4a69c"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f72f100cee8dde70100406d5c1abba515a7df926d4ed81e20a9730c062fe9ad"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca85398d6d093dd41dc0983cbf54ab8e6afd1c547b6b8a311643917fbf4e0c"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22748de2a07fcc8781a70edb887abf801bb6142e6236123ff93d12d92db3d406"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3a83c9954a4107b9acd10291b7f12a6b29e35e8d43a414799906ea10e75438e6"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:303565c67a6c7b1f194c94632a4a39918e067bd6176a48bec697393865ce4f06"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:86314fdb5053a2f5a5d881f03fca0219bfdf832912aa88d18676a5175c6916b5"}, + {file = "orjson-3.10.18-cp312-cp312-win32.whl", hash = "sha256:187ec33bbec58c76dbd4066340067d9ece6e10067bb0cc074a21ae3300caa84e"}, + {file = "orjson-3.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:f9f94cf6d3f9cd720d641f8399e390e7411487e493962213390d1ae45c7814fc"}, + {file = "orjson-3.10.18-cp312-cp312-win_arm64.whl", hash = "sha256:3d600be83fe4514944500fa8c2a0a77099025ec6482e8087d7659e891f23058a"}, + {file = "orjson-3.10.18-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:69c34b9441b863175cc6a01f2935de994025e773f814412030f269da4f7be147"}, + {file = "orjson-3.10.18-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:1ebeda919725f9dbdb269f59bc94f861afbe2a27dce5608cdba2d92772364d1c"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5adf5f4eed520a4959d29ea80192fa626ab9a20b2ea13f8f6dc58644f6927103"}, + {file 
= "orjson-3.10.18-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7592bb48a214e18cd670974f289520f12b7aed1fa0b2e2616b8ed9e069e08595"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f872bef9f042734110642b7a11937440797ace8c87527de25e0c53558b579ccc"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0315317601149c244cb3ecef246ef5861a64824ccbcb8018d32c66a60a84ffbc"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0da26957e77e9e55a6c2ce2e7182a36a6f6b180ab7189315cb0995ec362e049"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb70d489bc79b7519e5803e2cc4c72343c9dc1154258adf2f8925d0b60da7c58"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9e86a6af31b92299b00736c89caf63816f70a4001e750bda179e15564d7a034"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:c382a5c0b5931a5fc5405053d36c1ce3fd561694738626c77ae0b1dfc0242ca1"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8e4b2ae732431127171b875cb2668f883e1234711d3c147ffd69fe5be51a8012"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d808e34ddb24fc29a4d4041dcfafbae13e129c93509b847b14432717d94b44f"}, + {file = "orjson-3.10.18-cp313-cp313-win32.whl", hash = "sha256:ad8eacbb5d904d5591f27dee4031e2c1db43d559edb8f91778efd642d70e6bea"}, + {file = "orjson-3.10.18-cp313-cp313-win_amd64.whl", hash = "sha256:aed411bcb68bf62e85588f2a7e03a6082cc42e5a2796e06e72a962d7c6310b52"}, + {file = "orjson-3.10.18-cp313-cp313-win_arm64.whl", hash = "sha256:f54c1385a0e6aba2f15a40d703b858bedad36ded0491e55d35d905b2c34a4cc3"}, + {file = "orjson-3.10.18-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c95fae14225edfd699454e84f61c3dd938df6629a00c6ce15e704f57b58433bb"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5232d85f177f98e0cefabb48b5e7f60cff6f3f0365f9c60631fecd73849b2a82"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2783e121cafedf0d85c148c248a20470018b4ffd34494a68e125e7d5857655d1"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e54ee3722caf3db09c91f442441e78f916046aa58d16b93af8a91500b7bbf273"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2daf7e5379b61380808c24f6fc182b7719301739e4271c3ec88f2984a2d61f89"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f39b371af3add20b25338f4b29a8d6e79a8c7ed0e9dd49e008228a065d07781"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b819ed34c01d88c6bec290e6842966f8e9ff84b7694632e88341363440d4cc0"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2f6c57debaef0b1aa13092822cbd3698a1fb0209a9ea013a969f4efa36bdea57"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:755b6d61ffdb1ffa1e768330190132e21343757c9aa2308c67257cc81a1a6f5a"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ce8d0a875a85b4c8579eab5ac535fb4b2a50937267482be402627ca7e7570ee3"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:57b5d0673cbd26781bebc2bf86f99dd19bd5a9cb55f71cc4f66419f6b50f3d77"}, + {file = "orjson-3.10.18-cp39-cp39-win32.whl", hash = "sha256:951775d8b49d1d16ca8818b1f20c4965cae9157e7b562a2ae34d3967b8f21c8e"}, + {file = "orjson-3.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:fdd9d68f83f0bc4406610b1ac68bdcded8c5ee58605cc69e643a06f4d075f429"}, + {file = "orjson-3.10.18.tar.gz", hash = "sha256:e8da3947d92123eda795b68228cafe2724815621fe35e8e320a9e9593a4bcd53"}, +] + +[[package]] +name = "ormsgpack" +version = "1.9.1" +description = "Fast, correct Python msgpack library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "ormsgpack-1.9.1-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f1f804fd9c0fd84213a6022c34172f82323b34afa7052a4af18797582cf56365"}, + {file = "ormsgpack-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eab5cec99c46276b37071d570aab98603f3d0309b3818da3247eb64bb95e5cfc"}, + {file = "ormsgpack-1.9.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c12c6bb30e6df6fc0213b77f0a5e143f371d618be2e8eb4d555340ce01c6900"}, + {file = "ormsgpack-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994d4bbb7ee333264a3e55e30ccee063df6635d785f21a08bf52f67821454a51"}, + {file = "ormsgpack-1.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a668a584cf4bb6e1a6ef5a35f3f0d0fdae80cfb7237344ad19a50cce8c79317b"}, + {file = "ormsgpack-1.9.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:aaf77699203822638014c604d100f132583844d4fd01eb639a2266970c02cfdf"}, + {file = "ormsgpack-1.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:003d7e1992b447898caf25a820b3037ec68a57864b3e2f34b64693b7d60a9984"}, + {file = "ormsgpack-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:67fefc77e4ba9469f79426769eb4c78acf21f22bef3ab1239a72dd728036ffc2"}, + {file = "ormsgpack-1.9.1-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:16eaf32c33ab4249e242181d59e2509b8e0330d6f65c1d8bf08c3dea38fd7c02"}, + {file = "ormsgpack-1.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c70f2e5b2f9975536e8f7936a9721601dc54febe363d2d82f74c9b31d4fe1c65"}, + {file = "ormsgpack-1.9.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:17c9e18b07d69e3db2e0f8af4731040175e11bdfde78ad8e28126e9e66ec5167"}, + {file = "ormsgpack-1.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73538d749096bb6470328601a2be8f7bdec28849ec6fd19595c232a5848d7124"}, + {file = "ormsgpack-1.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:827ff71de228cfd6d07b9d6b47911aa61b1e8dc995dec3caf8fdcdf4f874bcd0"}, + {file = "ormsgpack-1.9.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:7307f808b3df282c8e8ed92c6ebceeb3eea3d8eeec808438f3f212226b25e217"}, + {file = "ormsgpack-1.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f30aad7fb083bed1c540a3c163c6a9f63a94e3c538860bf8f13386c29b560ad5"}, + {file = "ormsgpack-1.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:829a1b4c5bc3c38ece0c55cf91ebc09c3b987fceb24d3f680c2bcd03fd3789a4"}, + {file = "ormsgpack-1.9.1-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:1ede445fc3fdba219bb0e0d1f289df26a9c7602016b7daac6fafe8fe4e91548f"}, + {file = "ormsgpack-1.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:db50b9f918e25b289114312ed775794d0978b469831b992bdc65bfe20b91fe30"}, + {file = "ormsgpack-1.9.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c7d8fc58e4333308f58ec720b1ee6b12b2b3fe2d2d8f0766ab751cb351e8757"}, + {file = "ormsgpack-1.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeee6d08c040db265cb8563444aba343ecb32cbdbe2414a489dcead9f70c6765"}, + {file = "ormsgpack-1.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2fbb8181c198bdc413a4e889e5200f010724eea4b6d5a9a7eee2df039ac04aca"}, + {file = "ormsgpack-1.9.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:16488f094ac0e2250cceea6caf72962614aa432ee11dd57ef45e1ad25ece3eff"}, + {file = "ormsgpack-1.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:422d960bfd6ad88be20794f50ec7953d8f7a0f2df60e19d0e8feb994e2ed64ee"}, + {file = "ormsgpack-1.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:e6e2f9eab527cf43fb4a4293e493370276b1c8716cf305689202d646c6a782ef"}, + {file = "ormsgpack-1.9.1-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ac61c18d9dd085e8519b949f7e655f7fb07909fd09c53b4338dd33309012e289"}, + {file = "ormsgpack-1.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134840b8c6615da2c24ce77bd12a46098015c808197a9995c7a2d991e1904eec"}, + {file = "ormsgpack-1.9.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38fd42618f626394b2c7713c5d4bcbc917254e9753d5d4cde460658b51b11a74"}, + {file = "ormsgpack-1.9.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d36397333ad07b9eba4c2e271fa78951bd81afc059c85a6e9f6c0eb2de07cda"}, + {file = "ormsgpack-1.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:603063089597917d04e4c1b1d53988a34f7dc2ff1a03adcfd1cf4ae966d5fba6"}, + {file = "ormsgpack-1.9.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:94bbf2b185e0cb721ceaba20e64b7158e6caf0cecd140ca29b9f05a8d5e91e2f"}, + {file = "ormsgpack-1.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c38f380b1e8c96a712eb302b9349347385161a8e29046868ae2bfdfcb23e2692"}, + {file = "ormsgpack-1.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:a4bc63fb30db94075611cedbbc3d261dd17cf2aa8ff75a0fd684cd45ca29cb1b"}, + {file = "ormsgpack-1.9.1-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e95909248bece8e88a310a913838f17ff5a39190aa4e61de909c3cd27f59744b"}, + {file = "ormsgpack-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3939188810c5c641d6b207f29994142ae2b1c70534f7839bbd972d857ac2072"}, + {file = "ormsgpack-1.9.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b6476344a585aea00a2acc9fd07355bf2daac04062cfdd480fa83ec3e2403b"}, + {file = "ormsgpack-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d8b9d53da82b31662ce5a3834b65479cf794a34befb9fc50baa51518383250"}, + {file = "ormsgpack-1.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3933d4b0c0d404ee234dbc372836d6f2d2f4b6330c2a2fb9709ba4eaebfae7ba"}, + {file = "ormsgpack-1.9.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:f824e94a7969f0aee9a6847ec232cf731a03b8734951c2a774dd4762308ea2d2"}, + {file = "ormsgpack-1.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c1f3f2295374020f9650e4aa7af6403ff016a0d92778b4a48bb3901fd801232d"}, + {file = "ormsgpack-1.9.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:92eb1b4f7b168da47f547329b4b58d16d8f19508a97ce5266567385d42d81968"}, + {file = "ormsgpack-1.9.1.tar.gz", hash = "sha256:3da6e63d82565e590b98178545e64f0f8506137b92bd31a2d04fd7c82baf5794"}, +] + +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + [[package]] name = "packaging" version = "23.2" @@ -1412,8 +3216,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1583,6 +3387,31 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "posthog" +version = "4.0.1" +description = "Integrate PostHog into any python application." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "posthog-4.0.1-py2.py3-none-any.whl", hash = "sha256:0c76cbab3e5ab0096c4f591c0b536465478357270f926d11ff833c97984659d8"}, + {file = "posthog-4.0.1.tar.gz", hash = "sha256:77e7ebfc6086972db421d3e05c91d5431b2b964865d33a9a32e55dd88da4bff8"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +distro = ">=1.5.0" +python-dateutil = ">=2.2" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"] +langchain = ["langchain (>=0.2.0)"] +sentry = ["django", "sentry-sdk"] +test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"] + [[package]] name = "pre-commit" version = "3.6.1" @@ -1658,6 +3487,45 @@ files = [ {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, ] +[[package]] +name = "psycopg" +version = "3.2.6" +description = "PostgreSQL database adapter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"}, + {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.2.6) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", 
"types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + +[[package]] +name = "psycopg-pool" +version = "3.2.6" +description = "Connection Pool for Psycopg" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7"}, + {file = "psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5"}, +] + +[package.dependencies] +typing-extensions = ">=4.6" + [[package]] name = "psycopg2-binary" version = "2.9.9" @@ -1816,113 +3684,148 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" -version = "2.6.1" +version = "2.11.4" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, - {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, + {file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"}, + {file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.16.2" -typing-extensions = ">=4.6.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.16.2" -description = "" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, - {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, - {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, - {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, - {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, - {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, - {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, - {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, - {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, - {file = "pydantic_core-2.16.2-cp38-none-win32.whl", 
hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, - {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, - {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, - {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, - {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = 
"pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, ] [package.dependencies] @@ -1936,14 +3839,29 @@ optional = false python-versions = "*" groups = ["main"] files = [ - {file = "pydata-google-auth-1.8.2.tar.gz", hash = "sha256:547b6c0fbea657dcecd50887c5db8640ebec062a59a2b88e8ff8e53a04818303"}, - {file = "pydata_google_auth-1.8.2-py2.py3-none-any.whl", hash = 
"sha256:a9dce59af4a170ea60c4b2ebbc83ee1f74d34255a4f97b2469ae9a4a0dc98e99"}, + {file = "pydata-google-auth-1.8.2.tar.gz", hash = "sha256:547b6c0fbea657dcecd50887c5db8640ebec062a59a2b88e8ff8e53a04818303"}, + {file = "pydata_google_auth-1.8.2-py2.py3-none-any.whl", hash = "sha256:a9dce59af4a170ea60c4b2ebbc83ee1f74d34255a4f97b2469ae9a4a0dc98e99"}, +] + +[package.dependencies] +google-auth = {version = ">=1.25.0,<3.0dev", markers = "python_version >= \"3.6\""} +google-auth-oauthlib = {version = ">=0.4.0", markers = "python_version >= \"3.6\""} +setuptools = "*" + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] -[package.dependencies] -google-auth = {version = ">=1.25.0,<3.0dev", markers = "python_version >= \"3.6\""} -google-auth-oauthlib = {version = ">=0.4.0", markers = "python_version >= \"3.6\""} -setuptools = "*" +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" @@ -1978,6 +3896,45 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pypika" +version = "0.48.9" +description = "A SQL query builder API for Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, +] + +[[package]] +name = "pyproject-hooks" +version = "1.2.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +description = "A python implementation of GNU readline." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, +] + +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + [[package]] name = "pytest" version = "7.4.4" @@ -2054,6 +4011,21 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.1.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, + {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pytz" version = "2024.1" @@ -2072,7 +4044,7 @@ version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, @@ -2146,6 +4118,110 @@ async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2 hiredis = ["hiredis (>=1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = 
"regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + [[package]] name = "requests" version = "2.31.0" @@ -2187,6 +4263,41 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rich" +version = "14.0.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rsa" version = "4.9" @@ -2287,6 +4398,76 @@ tqdm = ">=4.0,<5.0" jinjafmt = ["black"] sqlfmt-primer = ["gitpython (>=3.1.24,<4.0.0)"] +[[package]] +name = "shapely" +version = "2.1.0" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "shapely-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d3e5c5e3864d4dc431dd85a8e5137ebd39c8ac287b009d3fa80a07017b29c940"}, + {file = "shapely-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6eea89b16f5f3a064659126455d23fa3066bc3d6cd385c35214f06bf5871aa6"}, + {file = "shapely-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:183174ad0b21a81ee661f05e7c47aa92ebfae01814cd3cbe54adea7a4213f5f4"}, + {file = "shapely-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f239c1484af66bc14b81a76f2a8e0fada29d59010423253ff857d0ccefdaa93f"}, + {file = "shapely-2.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6220a466d1475141dad0cd8065d2549a5c2ed3fa4e2e02fb8ea65d494cfd5b07"}, + {file = "shapely-2.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4822d3ed3efb06145c34d29d5b56792f72b7d713300f603bfd5d825892c6f79f"}, + {file = "shapely-2.1.0-cp310-cp310-win32.whl", hash = 
"sha256:ea51ddf3d3c60866dca746081b56c75f34ff1b01acbd4d44269071a673c735b9"}, + {file = "shapely-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6f5e02e2cded9f4ec5709900a296c7f2cce5f8e9e9d80ba7d89ae2f4ed89d7b"}, + {file = "shapely-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8323031ef7c1bdda7a92d5ddbc7b6b62702e73ba37e9a8ccc8da99ec2c0b87c"}, + {file = "shapely-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4da7c6cd748d86ec6aace99ad17129d30954ccf5e73e9911cdb5f0fa9658b4f8"}, + {file = "shapely-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f0cdf85ff80831137067e7a237085a3ee72c225dba1b30beef87f7d396cf02b"}, + {file = "shapely-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f2be5d79aac39886f23000727cf02001aef3af8810176c29ee12cdc3ef3a50"}, + {file = "shapely-2.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:21a4515009f56d7a159cf5c2554264e82f56405b4721f9a422cb397237c5dca8"}, + {file = "shapely-2.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:15cebc323cec2cb6b2eaa310fdfc621f6dbbfaf6bde336d13838fcea76c885a9"}, + {file = "shapely-2.1.0-cp311-cp311-win32.whl", hash = "sha256:cad51b7a5c8f82f5640472944a74f0f239123dde9a63042b3c5ea311739b7d20"}, + {file = "shapely-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4005309dde8658e287ad9c435c81877f6a95a9419b932fa7a1f34b120f270ae"}, + {file = "shapely-2.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53e7ee8bd8609cf12ee6dce01ea5affe676976cf7049315751d53d8db6d2b4b2"}, + {file = "shapely-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3cab20b665d26dbec0b380e15749bea720885a481fa7b1eedc88195d4a98cfa4"}, + {file = "shapely-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a38b39a09340273c3c92b3b9a374272a12cc7e468aeeea22c1c46217a03e5c"}, + {file = "shapely-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edaec656bdd9b71278b98e6f77c464b1c3b2daa9eace78012ff0f0b4b5b15b04"}, + {file = "shapely-2.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c8a732ddd9b25e7a54aa748e7df8fd704e23e5d5d35b7d376d80bffbfc376d04"}, + {file = "shapely-2.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9c93693ad8adfdc9138a5a2d42da02da94f728dd2e82d2f0f442f10e25027f5f"}, + {file = "shapely-2.1.0-cp312-cp312-win32.whl", hash = "sha256:d8ac6604eefe807e71a908524de23a37920133a1729fe3a4dfe0ed82c044cbf4"}, + {file = "shapely-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:f4f47e631aa4f9ec5576eac546eb3f38802e2f82aeb0552f9612cb9a14ece1db"}, + {file = "shapely-2.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b64423295b563f43a043eb786e7a03200ebe68698e36d2b4b1c39f31dfb50dfb"}, + {file = "shapely-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1b5578f45adc25b235b22d1ccb9a0348c8dc36f31983e57ea129a88f96f7b870"}, + {file = "shapely-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a7e83d383b27f02b684e50ab7f34e511c92e33b6ca164a6a9065705dd64bcb"}, + {file = "shapely-2.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:942031eb4d8f7b3b22f43ba42c09c7aa3d843aa10d5cc1619fe816e923b66e55"}, + {file = "shapely-2.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d2843c456a2e5627ee6271800f07277c0d2652fb287bf66464571a057dbc00b3"}, + {file = "shapely-2.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8c4b17469b7f39a5e6a7cfea79f38ae08a275427f41fe8b48c372e1449147908"}, + {file = 
"shapely-2.1.0-cp313-cp313-win32.whl", hash = "sha256:30e967abd08fce49513d4187c01b19f139084019f33bec0673e8dbeb557c45e4"}, + {file = "shapely-2.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:1dc8d4364483a14aba4c844b7bd16a6fa3728887e2c33dfa1afa34a3cf4d08a5"}, + {file = "shapely-2.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:673e073fea099d1c82f666fb7ab0a00a77eff2999130a69357ce11941260d855"}, + {file = "shapely-2.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6d1513f915a56de67659fe2047c1ad5ff0f8cbff3519d1e74fced69c9cb0e7da"}, + {file = "shapely-2.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d6a7043178890b9e028d80496ff4c79dc7629bff4d78a2f25323b661756bab8"}, + {file = "shapely-2.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb638378dc3d76f7e85b67d7e2bb1366811912430ac9247ac00c127c2b444cdc"}, + {file = "shapely-2.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:737124e87d91d616acf9a911f74ac55e05db02a43a6a7245b3d663817b876055"}, + {file = "shapely-2.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e6c229e7bb87aae5df82fa00b6718987a43ec168cc5affe095cca59d233f314"}, + {file = "shapely-2.1.0-cp313-cp313t-win32.whl", hash = "sha256:a9580bda119b1f42f955aa8e52382d5c73f7957e0203bc0c0c60084846f3db94"}, + {file = "shapely-2.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e8ff4e5cfd799ba5b6f37b5d5527dbd85b4a47c65b6d459a03d0962d2a9d4d10"}, + {file = "shapely-2.1.0.tar.gz", hash = "sha256:2cbe90e86fa8fc3ca8af6ffb00a77b246b918c7cf28677b7c21489b678f6b02e"}, +] + +[package.dependencies] +numpy = ">=1.21" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov", "scipy-doctest"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + [[package]] name = "six" version = "1.16.0" @@ -2299,22 +4480,147 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.40" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file 
= "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = 
"sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = 
"sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, +] + +[package.dependencies] +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + [[package]] name = "sqlparse" -version = "0.4.4" +version = "0.5.3" description = "A non-validating SQL parser." optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, - {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, + {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, + {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, ] [package.extras] -dev = ["build", "flake8"] +dev = ["build", "hatch"] doc = ["sphinx"] -test = ["pytest", "pytest-cov"] + +[[package]] +name = "starlette" +version = "0.46.2" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, + {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "stripe" @@ -2331,6 +4637,40 @@ files = [ [package.dependencies] requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} +[[package]] +name = "sympy" +version = "1.14.0" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, + {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, +] + +[package.dependencies] +mpmath = ">=1.1.0,<1.4" + +[package.extras] +dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] + +[[package]] +name = "tenacity" +version = "9.1.2" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + [[package]] name = "text-unidecode" version = "1.3" @@ -2343,6 +4683,87 @@ files = [ {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +[[package]] +name = "tiktoken" +version = "0.9.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382"}, + {file = "tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108"}, + {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd"}, + {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de"}, + {file = "tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990"}, + {file = "tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4"}, + {file = "tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e"}, + {file = "tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348"}, + {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33"}, + {file = 
"tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136"}, + {file = "tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336"}, + {file = "tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb"}, + {file = "tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03"}, + {file = "tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210"}, + {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794"}, + {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22"}, + {file = "tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2"}, + {file = "tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16"}, + {file = "tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb"}, + {file = "tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63"}, + {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01"}, + {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139"}, + {file = "tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a"}, + {file = "tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95"}, + {file = "tiktoken-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c6386ca815e7d96ef5b4ac61e0048cd32ca5a92d5781255e13b31381d28667dc"}, + {file = "tiktoken-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75f6d5db5bc2c6274b674ceab1615c1778e6416b14705827d19b40e6355f03e0"}, + {file = "tiktoken-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e15b16f61e6f4625a57a36496d28dd182a8a60ec20a534c5343ba3cafa156ac7"}, + {file = "tiktoken-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebcec91babf21297022882344c3f7d9eed855931466c3311b1ad6b64befb3df"}, + {file = "tiktoken-0.9.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e5fd49e7799579240f03913447c0cdfa1129625ebd5ac440787afc4345990427"}, + {file = "tiktoken-0.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:26242ca9dc8b58e875ff4ca078b9a94d2f0813e6a535dcd2205df5d49d927cc7"}, + {file = "tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tokenizers" +version = "0.21.1" +description = "" +optional = false +python-versions = ">=3.9" 
+groups = ["main"] +files = [ + {file = "tokenizers-0.21.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e78e413e9e668ad790a29456e677d9d3aa50a9ad311a40905d6861ba7692cf41"}, + {file = "tokenizers-0.21.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:cd51cd0a91ecc801633829fcd1fda9cf8682ed3477c6243b9a095539de4aecf3"}, + {file = "tokenizers-0.21.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28da6b72d4fb14ee200a1bd386ff74ade8992d7f725f2bde2c495a9a98cf4d9f"}, + {file = "tokenizers-0.21.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34d8cfde551c9916cb92014e040806122295a6800914bab5865deb85623931cf"}, + {file = "tokenizers-0.21.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa852d23e125b73d283c98f007e06d4595732104b65402f46e8ef24b588d9f8"}, + {file = "tokenizers-0.21.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a21a15d5c8e603331b8a59548bbe113564136dc0f5ad8306dd5033459a226da0"}, + {file = "tokenizers-0.21.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fdbd4c067c60a0ac7eca14b6bd18a5bebace54eb757c706b47ea93204f7a37c"}, + {file = "tokenizers-0.21.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd9a0061e403546f7377df940e866c3e678d7d4e9643d0461ea442b4f89e61a"}, + {file = "tokenizers-0.21.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:db9484aeb2e200c43b915a1a0150ea885e35f357a5a8fabf7373af333dcc8dbf"}, + {file = "tokenizers-0.21.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed248ab5279e601a30a4d67bdb897ecbe955a50f1e7bb62bd99f07dd11c2f5b6"}, + {file = "tokenizers-0.21.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:9ac78b12e541d4ce67b4dfd970e44c060a2147b9b2a21f509566d556a509c67d"}, + {file = "tokenizers-0.21.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e5a69c1a4496b81a5ee5d2c1f3f7fbdf95e90a0196101b0ee89ed9956b8a168f"}, + {file = "tokenizers-0.21.1-cp39-abi3-win32.whl", hash = "sha256:1039a3a5734944e09de1d48761ade94e00d0fa760c0e0551151d4dd851ba63e3"}, + {file = "tokenizers-0.21.1-cp39-abi3-win_amd64.whl", hash = "sha256:0f0dcbcc9f6e13e675a66d7a5f2f225a736745ce484c1a4e07476a89ccdad382"}, + {file = "tokenizers-0.21.1.tar.gz", hash = "sha256:a1bb04dc5b448985f86ecd4b05407f5a8d97cb2c0532199b2a302a604a0165ab"}, +] + +[package.dependencies] +huggingface-hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] + [[package]] name = "toml" version = "0.10.2" @@ -2361,7 +4782,7 @@ version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" -groups = ["dev", "test"] +groups = ["main", "dev", "test"] markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, @@ -2389,18 +4810,51 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "typer" +version = "0.15.3" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "typer-0.15.3-py3-none-any.whl", hash = "sha256:c86a65ad77ca531f03de08d1b9cb67cd09ad02ddddf4b34745b5008f43b239bd"}, + {file = "typer-0.15.3.tar.gz", hash = "sha256:818873625d0569653438316567861899f7e9972f2e6e0c16dab608345ced713c"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, ] +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" version = "2023.4" @@ -2442,6 +4896,86 @@ brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and p secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "uvicorn" +version = "0.34.2" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, + {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.21.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and sys_platform != \"win32\" and sys_platform != \"cygwin\"" +files = [ + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + [[package]] name = "virtualenv" version = "20.25.0" @@ -2463,6 +4997,186 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +[[package]] +name = "watchfiles" +version = "1.0.5" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "watchfiles-1.0.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5c40fe7dd9e5f81e0847b1ea64e1f5dd79dd61afbedb57759df06767ac719b40"}, + {file = "watchfiles-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c0db396e6003d99bb2d7232c957b5f0b5634bbd1b24e381a5afcc880f7373fb"}, + {file = "watchfiles-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b551d4fb482fc57d852b4541f911ba28957d051c8776e79c3b4a51eb5e2a1b11"}, + {file = "watchfiles-1.0.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:830aa432ba5c491d52a15b51526c29e4a4b92bf4f92253787f9726fe01519487"}, + {file = "watchfiles-1.0.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a16512051a822a416b0d477d5f8c0e67b67c1a20d9acecb0aafa3aa4d6e7d256"}, + {file = "watchfiles-1.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe0cbc787770e52a96c6fda6726ace75be7f840cb327e1b08d7d54eadc3bc85"}, + {file = "watchfiles-1.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d363152c5e16b29d66cbde8fa614f9e313e6f94a8204eaab268db52231fe5358"}, + {file = "watchfiles-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee32c9a9bee4d0b7bd7cbeb53cb185cf0b622ac761efaa2eba84006c3b3a614"}, + {file = "watchfiles-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29c7fd632ccaf5517c16a5188e36f6612d6472ccf55382db6c7fe3fcccb7f59f"}, + {file = "watchfiles-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e637810586e6fe380c8bc1b3910accd7f1d3a9a7262c8a78d4c8fb3ba6a2b3d"}, + {file = "watchfiles-1.0.5-cp310-cp310-win32.whl", hash = "sha256:cd47d063fbeabd4c6cae1d4bcaa38f0902f8dc5ed168072874ea11d0c7afc1ff"}, + {file = "watchfiles-1.0.5-cp310-cp310-win_amd64.whl", hash = 
"sha256:86c0df05b47a79d80351cd179893f2f9c1b1cae49d96e8b3290c7f4bd0ca0a92"}, + {file = "watchfiles-1.0.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:237f9be419e977a0f8f6b2e7b0475ababe78ff1ab06822df95d914a945eac827"}, + {file = "watchfiles-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0da39ff917af8b27a4bdc5a97ac577552a38aac0d260a859c1517ea3dc1a7c4"}, + {file = "watchfiles-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfcb3952350e95603f232a7a15f6c5f86c5375e46f0bd4ae70d43e3e063c13d"}, + {file = "watchfiles-1.0.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b2dddba7a4e6151384e252a5632efcaa9bc5d1c4b567f3cb621306b2ca9f63"}, + {file = "watchfiles-1.0.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cf944fcfc394c5f9de794ce581914900f82ff1f855326f25ebcf24d5397418"}, + {file = "watchfiles-1.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf6cd9f83d7c023b1aba15d13f705ca7b7d38675c121f3cc4a6e25bd0857ee9"}, + {file = "watchfiles-1.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852de68acd6212cd6d33edf21e6f9e56e5d98c6add46f48244bd479d97c967c6"}, + {file = "watchfiles-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5730f3aa35e646103b53389d5bc77edfbf578ab6dab2e005142b5b80a35ef25"}, + {file = "watchfiles-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:18b3bd29954bc4abeeb4e9d9cf0b30227f0f206c86657674f544cb032296acd5"}, + {file = "watchfiles-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ba5552a1b07c8edbf197055bc9d518b8f0d98a1c6a73a293bc0726dce068ed01"}, + {file = "watchfiles-1.0.5-cp311-cp311-win32.whl", hash = "sha256:2f1fefb2e90e89959447bc0420fddd1e76f625784340d64a2f7d5983ef9ad246"}, + {file = "watchfiles-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:b6e76ceb1dd18c8e29c73f47d41866972e891fc4cc7ba014f487def72c1cf096"}, + {file = "watchfiles-1.0.5-cp311-cp311-win_arm64.whl", hash = "sha256:266710eb6fddc1f5e51843c70e3bebfb0f5e77cf4f27129278c70554104d19ed"}, + {file = "watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2"}, + {file = "watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f"}, + {file = "watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec"}, + {file = "watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21"}, + {file = "watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512"}, + {file = "watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d"}, + {file = "watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6"}, + {file = "watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234"}, + {file = "watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2"}, + {file = "watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663"}, + {file = "watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249"}, + {file = "watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705"}, + {file = "watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417"}, + {file = "watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d"}, + {file = "watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763"}, + {file = "watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40"}, + {file = "watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563"}, + {file = "watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04"}, + {file = "watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f"}, + {file = "watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a"}, + {file = "watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827"}, + {file = "watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a"}, + {file = "watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936"}, + {file = "watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc"}, + {file = "watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11"}, + {file = "watchfiles-1.0.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2cfb371be97d4db374cba381b9f911dd35bb5f4c58faa7b8b7106c8853e5d225"}, + {file = "watchfiles-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3904d88955fda461ea2531fcf6ef73584ca921415d5cfa44457a225f4a42bc1"}, + {file = "watchfiles-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b7a21715fb12274a71d335cff6c71fe7f676b293d322722fe708a9ec81d91f5"}, + {file = "watchfiles-1.0.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dfd6ae1c385ab481766b3c61c44aca2b3cd775f6f7c0fa93d979ddec853d29d5"}, + {file = "watchfiles-1.0.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b659576b950865fdad31fa491d31d37cf78b27113a7671d39f919828587b429b"}, + {file = "watchfiles-1.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1909e0a9cd95251b15bff4261de5dd7550885bd172e3536824bf1cf6b121e200"}, + {file = 
"watchfiles-1.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:832ccc221927c860e7286c55c9b6ebcc0265d5e072f49c7f6456c7798d2b39aa"}, + {file = "watchfiles-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85fbb6102b3296926d0c62cfc9347f6237fb9400aecd0ba6bbda94cae15f2b3b"}, + {file = "watchfiles-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:15ac96dd567ad6c71c71f7b2c658cb22b7734901546cd50a475128ab557593ca"}, + {file = "watchfiles-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b6227351e11c57ae997d222e13f5b6f1f0700d84b8c52304e8675d33a808382"}, + {file = "watchfiles-1.0.5-cp39-cp39-win32.whl", hash = "sha256:974866e0db748ebf1eccab17862bc0f0303807ed9cda465d1324625b81293a18"}, + {file = "watchfiles-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:9848b21ae152fe79c10dd0197304ada8f7b586d3ebc3f27f43c506e5a52a863c"}, + {file = "watchfiles-1.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f59b870db1f1ae5a9ac28245707d955c8721dd6565e7f411024fa374b5362d1d"}, + {file = "watchfiles-1.0.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9475b0093767e1475095f2aeb1d219fb9664081d403d1dff81342df8cd707034"}, + {file = "watchfiles-1.0.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc533aa50664ebd6c628b2f30591956519462f5d27f951ed03d6c82b2dfd9965"}, + {file = "watchfiles-1.0.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed1cd825158dcaae36acce7b2db33dcbfd12b30c34317a88b8ed80f0541cc57"}, + {file = "watchfiles-1.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:554389562c29c2c182e3908b149095051f81d28c2fec79ad6c8997d7d63e0009"}, + {file = "watchfiles-1.0.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a74add8d7727e6404d5dc4dcd7fac65d4d82f95928bbee0cf5414c900e86773e"}, + {file = "watchfiles-1.0.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb1489f25b051a89fae574505cc26360c8e95e227a9500182a7fe0afcc500ce0"}, + {file = "watchfiles-1.0.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0901429650652d3f0da90bad42bdafc1f9143ff3605633c455c999a2d786cac"}, + {file = "watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = 
"websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = 
"websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = 
"websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + [[package]] name = "win32-setctime" version = "1.1.0" @@ -2479,6 +5193,95 @@ files = [ [package.extras] dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] +[[package]] +name = "wrapt" +version = "1.17.2" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = 
"wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, +] + [[package]] name = "yamlfix" version = "1.16.0" @@ -2496,7 +5299,140 @@ click = ">=8.1.3" maison = ">=1.4.0" ruyaml = ">=0.91.0" +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[[package]] +name = "zstandard" +version = 
"0.23.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, + {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, + {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, + {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, + {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, + {file = 
"zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, + {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, + {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, + {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, + {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, + {file = 
"zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, + {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, + {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, + {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, + {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, + {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "dd9e4d7250d6758ec617051e48a482c3cc96946537cc5bd24b4dd72f90a179e1" +content-hash = "3f0b1890a850a10387b347751e01af37a449346449a3cc504fea672d3b8cd6c1" diff --git a/pyproject.toml b/pyproject.toml index a2b8cedf..43c72693 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ requests = "^2.31.0" tqdm = "^4.66.4" djangorestframework = "^3.16.0" djangorestframework-simplejwt = "^5.5.0" +chatbot = {path = "chatbot"} [tool.poetry.group.dev.dependencies] pre-commit = "^3.3.3" From d3830ab3aa2c226d5017291213d1e9b48335dc32 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 9 May 2025 17:32:01 -0300 Subject: [PATCH 055/181] update env variables file --- .env.docker | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/.env.docker b/.env.docker index e9c922be..547cd6e3 100644 --- a/.env.docker +++ b/.env.docker @@ -2,34 +2,42 @@ ADMINS="Gabriel Milan,gabriel.gazola@poli.ufrj.br" EMAIL_HOST_USER="notifications@gmail.com" EMAIL_HOST_PASSWORD="password" + # Django configurations DJANGO_SECRET_KEY="some-secret" DJANGO_SETTINGS_MODULE="backend.settings.local" + # Logger LOGGER_LEVEL="DEBUG" LOGGER_IGNORE="faker,haystack" LOGGER_SERIALIZE="" + # Database DB_HOST="database" DB_PORT="5432" DB_NAME="postgres" DB_USER="postgres" DB_PASSWORD="postgres" + # Queue REDIS_HOST="queue" REDIS_PORT="6379" + # Index ELASTICSEARCH_URL=http://index:9200 + # Chatbot GOOGLE_APPLICATION_CREDENTIALS= BILLING_PROJECT_ID= QUERY_PROJECT_ID= MODEL_URI= -OPENAI_API_KEY= LANGCHAIN_TRACING_V2= LANGCHAIN_API_KEY= DB_URL= CHROMA_HOST= CHROMA_PORT= SQL_CHROMA_COLLECTION= -VIZ_CHROMA_COLLECTION= + +# Local DB populating +METABASE_USER= +METABASE_PASSWORD= From 2cbca17196a0c445fc46617e466de380936d4e73 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Tue, 13 May 2025 17:33:03 -0300 Subject: [PATCH 056/181] optimize docker image --- Dockerfile | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index bd32deab..56449182 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,20 +7,18 @@ RUN pip install --no-cache-dir -U virtualenv>=20.13.1 && virtualenv /env --pytho ENV PATH /env/bin:$PATH # Install make, nginx and copy configuration -RUN apt-get update \ - && apt-get install -y 
--no-install-recommends build-essential curl g++ libpq-dev make nginx \ +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential curl g++ libpq-dev make nginx postgresql postgresql-contrib \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && rm /etc/nginx/sites-enabled/default -RUN apt-get update && apt-get install -y postgresql postgresql-contrib COPY nginx.conf /etc/nginx/nginx.conf # Install pip requirements WORKDIR /app COPY . . -RUN /env/bin/pip install --no-cache-dir . && rm nginx.conf RUN test -d ./chatbot || (echo "ERROR: Git submodule 'chatbot' not found. Please run 'git submodule update --init --recursive'. See backend/README.md for more information." && exit 1) -RUN /env/bin/pip install --no-cache-dir ./chatbot +RUN /env/bin/pip install --no-cache-dir . ./chatbot && rm nginx.conf # Prevents Python from writing .pyc files to disc # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE From 1e654386acc5b2732f71aadcb11b858e457c56f2 Mon Sep 17 00:00:00 2001 From: Laura Amaral Date: Thu, 8 May 2025 10:06:54 -0300 Subject: [PATCH 057/181] fix: add fields and fix order in table and dataset admin page --- backend/apps/api/v1/admin.py | 256 +++++++++++++++++++++--- backend/apps/api/v1/forms/admin_form.py | 9 +- backend/apps/api/v1/models.py | 4 + 3 files changed, 235 insertions(+), 34 deletions(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index 97af9437..8048d2b4 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -12,13 +12,13 @@ from django.shortcuts import render from django.urls import reverse from django.utils.html import format_html +from django.utils.safestring import mark_safe from modeltranslation.admin import TabbedTranslationAdmin, TranslationStackedInline from ordered_model.admin import OrderedInlineModelAdminMixin, OrderedStackedInline from backend.apps.api.v1.filters import ( AreaAdministrativeLevelFilter, AreaParentFilter, - DatasetOrganizationListFilter, OrganizationImageListFilter, TableCoverageListFilter, TableDirectoryListFilter, @@ -611,18 +611,9 @@ class DatasetAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "full_slug", "spatial_coverage", "temporal_coverage", - "contains_tables", - "contains_raw_data_sources", - "contains_information_requests", - "contains_closed_data", - "contains_direct_download_free", - "contains_direct_download_paid", - "contains_temporalcoverage_free", - "contains_temporalcoverage_paid", "page_views", "created_at", "updated_at", - "related_objects", ] search_fields = ["name", "slug", "organizations__name"] filter_horizontal = [ @@ -630,16 +621,12 @@ class DatasetAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "themes", "organizations", ] - list_filter = [ - DatasetOrganizationListFilter, - ] list_display = [ "name", "get_organizations", - "spatial_coverage", "temporal_coverage", - "related_objects", - "created_at", + "related_tables", + "related_raw_data_sources", "updated_at", ] ordering = ["-updated_at"] @@ -650,7 +637,7 @@ def get_organizations(self, obj): get_organizations.short_description = "Organizations" - def related_objects(self, obj): + def related_tables(self, obj): return format_html( "{1} {2}", @@ -659,7 +646,18 @@ def related_objects(self, obj): "tables" if obj.tables.count() > 1 else "table", ) - related_objects.short_description = "Tables" + related_tables.short_description = "Tables" + + def related_raw_data_sources(self, obj): + return format_html( + "{1} {2}", + obj.id, + 
obj.raw_data_sources.count(), + "sources" if obj.raw_data_sources.count() > 1 else "sources", + ) + + related_raw_data_sources.short_description = "Sources" class CustomUserAdmin(UserAdmin): @@ -673,6 +671,31 @@ class CustomUserAdmin(UserAdmin): class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): form = TableForm + fieldsets = ( + ( + None, + { + "fields": ( + "dataset", + "get_table_url", + "status", + "name", + "slug", + "description", + "get_datetime_ranges_display", + "number_columns", + "number_rows", + "get_update_display", + "raw_data_source", + "published_by", + "data_cleaned_by", + "auxiliary_files_url", + "created_at", + "updated_at", + ) + }, + ), + ) actions = [ reorder_columns, reset_column_order, @@ -689,7 +712,8 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): UpdateInline, ] readonly_fields = [ - "id", + "get_table_url", + "get_datetime_ranges_display", "partitions", "created_at", "updated_at", @@ -700,13 +724,8 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "number_columns", "uncompressed_file_size", "compressed_file_size", - "contains_open_data", - "contains_direct_download_free", - "contains_direct_download_paid", - "contains_temporalcoverage_free", - "contains_temporalcoverage_paid", - "contains_closed_data", "page_views", + "get_update_display", ] search_fields = [ "name", @@ -714,7 +733,6 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): ] autocomplete_fields = [ "dataset", - "partner_organization", "published_by", "data_cleaned_by", ] @@ -757,6 +775,188 @@ def get_data_cleaners(self, obj): get_data_cleaners.short_description = "Data Cleaners" + def get_table_url(self, obj): + """Get the clickable URL for the table""" + website_url = f"https://basedosdados.org/dataset/{obj.dataset.id}?table={obj.id}" + website_html = format_html( + '🖥️ Ver tabela no site', website_url + ) + + cloud_tables = obj.cloud_tables.all() + + if len(cloud_tables) == 0: + add_cloud_table_url = reverse("admin:v1_cloudtable_add") + f"?table={obj.id}" + gcp_html = format_html( + 'No cloud table found. Create here', add_cloud_table_url + ) + + elif len(cloud_tables) > 1: + cloud_table_tab = reverse("admin:v1_table_change") + "/#cloud-tables-tab" + gcp_html = format_html( + 'More than 1 cloud table found. Fix it here', cloud_table_tab + ) + + else: + cloud_table = cloud_tables[0] + gcp_dev_url = f"https://console.cloud.google.com/bigquery?p=basedosdados-dev&d={cloud_table.gcp_dataset_id}&t={cloud_table.gcp_table_id}&page=table" + gcp_prod_url = f"https://console.cloud.google.com/bigquery?p=basedosdados&d={cloud_table.gcp_dataset_id}&t={cloud_table.gcp_table_id}&page=table" + + # Gerando o HTML + gcp_html = format_html( + '🧩 Ver tabela em BigQuery-dev
' + '🧊 Ver tabela em BigQuery-prod', + gcp_dev_url, + gcp_prod_url, + ) + + return format_html("{}
{}", website_html, gcp_html) + + get_table_url.short_description = "Table URLs" + + def get_datetime_ranges_display(self, obj): + """Display datetime ranges with links to their admin pages""" + coverages = list(obj.coverages.all()) + links = [] + + if len(coverages) == 0: + add_coverage_url = reverse("admin:v1_coverage_add") + f"?table={obj.id}" + return format_html("No coverages found. Create here", add_coverage_url) + + for cov in coverages: + url_coverage = reverse("admin:v1_coverage_change", args=[cov.id]) + add_date_time_range_url = reverse("admin:v1_datetimerange_add") + f"?coverage={cov.id}" + status = "Closed" if cov.is_closed else "Open" + + if cov.datetime_ranges.count() == 0: + links.append( + format_html( + "⚠️ {} coverage found, but no Datetime Range. Create here", + add_date_time_range_url, + status, + add_date_time_range_url, + ) + ) + + ranges = sorted(cov.datetime_ranges.all(), key=lambda dt: str(dt)) + for dt_range in ranges: + url_dt_range = reverse("admin:v1_datetimerange_change", args=[dt_range.id]) + links.append( + format_html( + '{} - {} coverage', + url_dt_range, + str(dt_range), + url_coverage, + status, + ) + ) + + return format_html("
".join(links)) + + get_datetime_ranges_display.short_description = "DateTime Ranges" + + def get_update_display(self, obj): + """Display update info""" + + def check_if_there_is_only_one_object_connected( + object_label, attr_label, tab_label, connection_obj + ): + print(f"\t\t\t{object_label = }") + print(f"\t\t\t{attr_label = }") + print(f"\t\t\t{tab_label = }") + print(f"\t\t\t{connection_obj = }") + + campos = [f.name for f in connection_obj._meta.get_fields()] + print(f"\t\t\t{campos = }") + + if attr_label not in campos: + print(f"\t\t\t{attr_label} not in {campos}") + return None, format_html( + "No {0} found. Create one", + object_label, + connection_obj.admin_url, + ) + + obj_list = getattr(connection_obj, attr_label).all() + + print(f"\t\t\t{list(obj_list) = }") + + connection_label = connection_obj._meta.model_name + + print(f"\t\t\t{connection_label = }") + + change_url = connection_obj.admin_url + ("#" + tab_label if tab_label else "") + + print(f"\t\t\t{change_url = }") + + # Se não houver objetos + if len(obj_list) == 0: + return None, format_html( + "No {0} found. Create one", object_label, change_url + ) + + # Se houver mais de 1 objeto + elif len(obj_list) > 1: + return None, format_html( + "More than 1 {0} found. Fix it", object_label, change_url + ) + + # Se houver exatamente 1 objeto + else: + selected_obj = obj_list[0] + html = format_html( + "{} {} found in {} ", + selected_obj.admin_url, + str(selected_obj), + object_label, + change_url, + connection_label, + ) + return obj_list[0], html + + def check_if_there_is_only_one_raw_data_source_connected(table_object): + obj_list = getattr(table_object, "raw_data_source").all() + if len(obj_list) == 0: + return None, format_html("No Raw Data Source found. Add one in the box bellow") + + # Se houver mais de 1 objeto + elif len(obj_list) > 1: + return None, format_html( + "More than 1 Raw Data Source found. Fix one in the box bellow" + ) + + # Se houver exatamente 1 objeto + else: + selected_obj = obj_list[0] + html = format_html( + "Raw Data Source found", + selected_obj.admin_url, + str(selected_obj), + ) + return obj_list[0], html + + _, update_html = check_if_there_is_only_one_object_connected( + "update", "updates", "updates-tab", obj + ) + ( + raw_data_source_obj, + raw_data_source_html, + ) = check_if_there_is_only_one_raw_data_source_connected(obj) + print(f"{raw_data_source_obj.admin_url = }") + if raw_data_source_obj: + _, raw_data_source_update_html = check_if_there_is_only_one_object_connected( + "update", "updates", "updates-tab", raw_data_source_obj + ) + print(f"{raw_data_source_update_html = }") + _, poll_raw_data_source_html = check_if_there_is_only_one_object_connected( + "poll", "polls", "polls-tab", raw_data_source_obj + ) + + raw_data_source_html = format_html( + raw_data_source_update_html + "
" + poll_raw_data_source_html + ) + + return format_html(update_html + "
" + raw_data_source_html) + class TableNeighborAdmin(admin.ModelAdmin): search_fields = [ @@ -1054,9 +1254,9 @@ def datetime_ranges_display(self, obj): # Add link to add new datetime range add_url = reverse("admin:v1_datetimerange_add") + f"?coverage={obj.id}" - links.append(format_html('Add DateTime Range', add_url)) + links.append(mark_safe(f'Add DateTime Range')) - return format_html("
".join(links)) + return mark_safe("
".join(links)) datetime_ranges_display.short_description = "DateTime Ranges" diff --git a/backend/apps/api/v1/forms/admin_form.py b/backend/apps/api/v1/forms/admin_form.py index 84c45044..cbb69d3b 100644 --- a/backend/apps/api/v1/forms/admin_form.py +++ b/backend/apps/api/v1/forms/admin_form.py @@ -87,15 +87,12 @@ class Meta(UUIDHiddenIdForm.Meta): fields = [ "id", "name", - "name_staging", - "description", "bigquery_type", - "is_closed", - "status", - "is_primary_key", - "table", + "description", + "covered_by_dictionary", "observation_level", "directory_primary_key", + "is_primary_key", ] readonly_fields = [ "order", diff --git a/backend/apps/api/v1/models.py b/backend/apps/api/v1/models.py index cc57405b..e7e382aa 100644 --- a/backend/apps/api/v1/models.py +++ b/backend/apps/api/v1/models.py @@ -895,6 +895,8 @@ class Update(BaseModel): graphql_nested_filter_fields_whitelist = ["id"] def __str__(self): + if self.latest: + return f"{self.latest.strftime('%Y-%m-%d')}: {str(self.frequency)} {str(self.entity)}" return f"{str(self.frequency)} {str(self.entity)}" class Meta: @@ -964,6 +966,8 @@ class Poll(BaseModel): graphql_nested_filter_fields_whitelist = ["id"] def __str__(self): + if self.latest: + return f"{self.latest.strftime('%Y-%m-%d')}: {str(self.frequency)} {str(self.entity)}" return f"{str(self.frequency)} {str(self.entity)}" class Meta: From 1167f4f6ac0ede6ddfb37ec1fc8ce15acbef17f0 Mon Sep 17 00:00:00 2001 From: Laura Amaral Date: Tue, 20 May 2025 15:10:47 -0300 Subject: [PATCH 058/181] fix: clean functions --- backend/apps/api/v1/admin.py | 112 ++++++++++++++--------------------- 1 file changed, 44 insertions(+), 68 deletions(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index 8048d2b4..f9408318 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -20,10 +20,6 @@ AreaAdministrativeLevelFilter, AreaParentFilter, OrganizationImageListFilter, - TableCoverageListFilter, - TableDirectoryListFilter, - TableObservationListFilter, - TableOrganizationListFilter, ) from backend.apps.api.v1.forms import ( ColumnInlineForm, @@ -742,17 +738,9 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): list_display = [ "name", "dataset", - "get_publishers", - "get_data_cleaners", "created_at", "updated_at", ] - list_filter = [ - TableOrganizationListFilter, - TableCoverageListFilter, - TableObservationListFilter, - TableDirectoryListFilter, - ] ordering = ["-updated_at"] def get_queryset(self, request): @@ -823,14 +811,15 @@ def get_datetime_ranges_display(self, obj): return format_html("No coverages found. Create here", add_coverage_url) for cov in coverages: - url_coverage = reverse("admin:v1_coverage_change", args=[cov.id]) + url_coverage = cov.admin_url add_date_time_range_url = reverse("admin:v1_datetimerange_add") + f"?coverage={cov.id}" status = "Closed" if cov.is_closed else "Open" if cov.datetime_ranges.count() == 0: links.append( format_html( - "⚠️ {} coverage found, but no Datetime Range. Create here", + "⚠️ {} coverage found, but no Datetime Range." 
+ "Create here", add_date_time_range_url, status, add_date_time_range_url, @@ -854,50 +843,38 @@ def get_datetime_ranges_display(self, obj): get_datetime_ranges_display.short_description = "DateTime Ranges" - def get_update_display(self, obj): + def get_update_display(self, table_obj): """Display update info""" - def check_if_there_is_only_one_object_connected( - object_label, attr_label, tab_label, connection_obj - ): - print(f"\t\t\t{object_label = }") - print(f"\t\t\t{attr_label = }") - print(f"\t\t\t{tab_label = }") - print(f"\t\t\t{connection_obj = }") - + def check_if_there_is_only_one_object_connected(attr_label, connection_obj): campos = [f.name for f in connection_obj._meta.get_fields()] - print(f"\t\t\t{campos = }") if attr_label not in campos: - print(f"\t\t\t{attr_label} not in {campos}") - return None, format_html( - "No {0} found. Create one", - object_label, - connection_obj.admin_url, + return format_html( + "The {} label was not found in {} model", + attr_label, + connection_obj._meta.verbose_name, ) obj_list = getattr(connection_obj, attr_label).all() - - print(f"\t\t\t{list(obj_list) = }") - - connection_label = connection_obj._meta.model_name - - print(f"\t\t\t{connection_label = }") - - change_url = connection_obj.admin_url + ("#" + tab_label if tab_label else "") - - print(f"\t\t\t{change_url = }") + change_url = connection_obj.admin_url + "#" + attr_label + "-tab" # Se não houver objetos if len(obj_list) == 0: - return None, format_html( - "No {0} found. Create one", object_label, change_url + return format_html( + "No {} found in {}. Create one", + attr_label, + connection_obj._meta.verbose_name, + change_url, ) # Se houver mais de 1 objeto elif len(obj_list) > 1: - return None, format_html( - "More than 1 {0} found. Fix it", object_label, change_url + return format_html( + "More than 1 {} found in {}. Fix it", + obj_list[0]._meta.verbose_name, + connection_obj._meta.verbose_name, + change_url, ) # Se houver exatamente 1 objeto @@ -907,56 +884,55 @@ def check_if_there_is_only_one_object_connected( "{} {} found in {} ", selected_obj.admin_url, str(selected_obj), - object_label, + selected_obj._meta.verbose_name, change_url, - connection_label, + connection_obj._meta.verbose_name, ) - return obj_list[0], html + return html def check_if_there_is_only_one_raw_data_source_connected(table_object): - obj_list = getattr(table_object, "raw_data_source").all() - if len(obj_list) == 0: + """Specific function to check Raw Data Source + the instructions and conditionals are different from updates and polls""" + raw_data_source_obj_list = getattr(table_object, "raw_data_source").all() + if len(raw_data_source_obj_list) == 0: return None, format_html("No Raw Data Source found. Add one in the box bellow") - # Se houver mais de 1 objeto - elif len(obj_list) > 1: - return None, format_html( - "More than 1 Raw Data Source found. Fix one in the box bellow" - ) + elif len(raw_data_source_obj_list) > 1: + return None, format_html("More than 1 Raw Data Source found. 
Fix in the box bellow") - # Se houver exatamente 1 objeto else: - selected_obj = obj_list[0] + selected_obj = raw_data_source_obj_list[0] html = format_html( "Raw Data Source found", selected_obj.admin_url, str(selected_obj), ) - return obj_list[0], html + return raw_data_source_obj_list[0], html + + update_html = check_if_there_is_only_one_object_connected("updates", table_obj) - _, update_html = check_if_there_is_only_one_object_connected( - "update", "updates", "updates-tab", obj - ) ( raw_data_source_obj, raw_data_source_html, - ) = check_if_there_is_only_one_raw_data_source_connected(obj) - print(f"{raw_data_source_obj.admin_url = }") + ) = check_if_there_is_only_one_raw_data_source_connected(table_obj) + if raw_data_source_obj: - _, raw_data_source_update_html = check_if_there_is_only_one_object_connected( - "update", "updates", "updates-tab", raw_data_source_obj + raw_data_source_update_html = check_if_there_is_only_one_object_connected( + "updates", raw_data_source_obj ) print(f"{raw_data_source_update_html = }") - _, poll_raw_data_source_html = check_if_there_is_only_one_object_connected( - "poll", "polls", "polls-tab", raw_data_source_obj + poll_raw_data_source_html = check_if_there_is_only_one_object_connected( + "polls", raw_data_source_obj ) - raw_data_source_html = format_html( - raw_data_source_update_html + "
" + poll_raw_data_source_html - ) + raw_data_source_html = format_html( + raw_data_source_update_html + "
" + poll_raw_data_source_html + ) return format_html(update_html + "
" + raw_data_source_html) + get_update_display.short_description = "Update and Poll Info" + class TableNeighborAdmin(admin.ModelAdmin): search_fields = [ From 959e581b7fd55795504c0dae168370f0b0e6f562 Mon Sep 17 00:00:00 2001 From: Luiz Eduardo Date: Wed, 21 May 2025 03:16:54 -0300 Subject: [PATCH 059/181] =?UTF-8?q?feat:=20Adicionando=20POC=20para=20bot?= =?UTF-8?q?=C3=A3o=20upload=5Fcolumns=20em=20Tables=20(#813)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit feat: Adicionando POC para botão upload_columns em Tables (#813) --- backend/apps/api/v1/urls.py | 3 +- backend/apps/api/v1/views.py | 87 ++++++++++++++++- backend/apps/core/static/core/css/main.css | 95 +++++++++++++++++-- .../apps/core/static/core/js/ferramentas.js | 57 +++++++++++ backend/templates/admin/change_form.html | 48 ++++++++++ 5 files changed, 281 insertions(+), 9 deletions(-) create mode 100644 backend/apps/core/static/core/js/ferramentas.js create mode 100644 backend/templates/admin/change_form.html diff --git a/backend/apps/api/v1/urls.py b/backend/apps/api/v1/urls.py index d3fa3ab5..412e65c9 100644 --- a/backend/apps/api/v1/urls.py +++ b/backend/apps/api/v1/urls.py @@ -5,7 +5,7 @@ from graphene_file_upload.django import FileUploadGraphQLView from backend.apps.api.v1.search_views import DatasetFacetValuesView, DatasetSearchView -from backend.apps.api.v1.views import DatasetRedirectView +from backend.apps.api.v1.views import DatasetRedirectView, upload_columns def redirect_to_graphql(request): @@ -25,4 +25,5 @@ def graphql_view(): path("facet_values/", DatasetFacetValuesView.as_view()), path("dataset/", DatasetRedirectView.as_view()), path("dataset_redirect/", DatasetRedirectView.as_view()), + path("upload_columns/", upload_columns), ] diff --git a/backend/apps/api/v1/views.py b/backend/apps/api/v1/views.py index 829253c7..89153b3b 100644 --- a/backend/apps/api/v1/views.py +++ b/backend/apps/api/v1/views.py @@ -1,12 +1,14 @@ # -*- coding: utf-8 -*- from __future__ import annotations +from typing import Dict, List from urllib.parse import urlparse -from django.http import HttpResponseRedirect +import pandas as pd +from django.http import HttpRequest, HttpResponseRedirect, JsonResponse from django.views import View -from backend.apps.api.v1.models import CloudTable, Dataset +from backend.apps.api.v1.models import BigQueryType, CloudTable, Column, Dataset, Table URL_MAPPING = { "localhost:8080": "http://localhost:3000", @@ -34,3 +36,84 @@ def get(self, request, *args, **kwargs): return HttpResponseRedirect(f"{domain}/dataset/{resource.id}") return HttpResponseRedirect(f"{domain}/404") + + +def upload_columns(request: HttpRequest): + # Aqui vai sua função + + token, table_id, dataset_id, link = request.POST.values() + + selected_table = Table.objects.get(id=table_id) + + selected_table.columns.all().delete() + + architecture = read_architecture_table(link) + + tables_dict: Dict[str, Table] = {table.gbq_slug: table for table in Table.objects.all()} + + columns: List[Column] = [ + create_columns(selected_table=selected_table, tables_dict=tables_dict, row=row) + for _, row in architecture.iterrows() + ] + + selected_table.columns.set(columns) + + resultado = "Colunas Salvas com sucesso!" 
+ + print(token, table_id, dataset_id, link) + + return JsonResponse({"status": "sucesso", "mensagem": resultado}) + + +def read_architecture_table(url: str) -> pd.DataFrame: + id_spreadsheets = url.split("/")[-2] + + spreadsheets_raw_url = ( + f"https://docs.google.com/spreadsheets/d/{id_spreadsheets}/gviz/tq?tqx=out:csv" + ) + + df_architecture = pd.read_csv(spreadsheets_raw_url, dtype=str) + + df_architecture = df_architecture.loc[df_architecture["name"] != "(excluido)"] + + df_architecture.fillna("", inplace=True) + + return df_architecture + + +def create_columns(selected_table: Table, tables_dict: Dict[str, Table], row: pd.Series) -> Column: + # Pegar ID do BigQueryType Model + + row_bqtype = row["bigquery_type"].strip().upper() + bqtype = BigQueryType.objects.get(name=row_bqtype) + + # Pegar ID da coluna Diretorio + + directory_column = None + + if row["directory_column"]: + full_slug_models = "basedosdados.{table_full_slug}" + + table_full_slug = row["directory_column"].split(":")[0] + table_full_slug = full_slug_models.format(table_full_slug=table_full_slug) + + directory_column_name = row["directory_column"].split(":")[1] + + table_directory = tables_dict[table_full_slug] + + directory_column = table_directory.columns.get(name=directory_column_name) + + # Definir Coluna + + column = selected_table.columns.create( + name=row["name"], + description=row["description"], + covered_by_dictionary=row["covered_by_dictionary"] == "yes", + measurement_unit=row["measurement_unit"], + contains_sensitive_data=row["has_sensitive_data"] == "yes", + observations=row["observations"], + bigquery_type=bqtype, + directory_primary_key=directory_column, + ) + + return column diff --git a/backend/apps/core/static/core/css/main.css b/backend/apps/core/static/core/css/main.css index d5a5e14b..7645bc92 100644 --- a/backend/apps/core/static/core/css/main.css +++ b/backend/apps/core/static/core/css/main.css @@ -1,6 +1,89 @@ -.btn-primary { - color: #fff; - background-color: #28a745; - border-color: #28a745; - box-shadow: none; -} +.btn-primary { + color: #fff; + background-color: #28a745; + border-color: #28a745; + box-shadow: none; +} + +.botao-imagem { + padding: 0; + border: none; + background: none; + width: 150px; + height: 30px; +} +.botao-imagem img { + width: 150px; + height: 30px; +} + +.modal { +display: none; +position: fixed; +top: 0; +left: 0; +width: 100%; +height: 100%; +background-color: rgba(0, 0, 0, 0.5); +z-index: 1000; +} + +.modal-content { + background-color: white; + margin: 15% auto; + padding: 20px; + width: 80%; + max-width: 500px; + border-radius: 5px; +} + +.form-group { + margin-bottom: 15px; +} + +.form-group label { + display: block; + margin-bottom: 5px; +} + +.form-group input { + width: 100%; + padding: 8px; + border: 1px solid #ddd; + border-radius: 4px; +} + +.close-button { + float: right; + font-size: 24px; + cursor: pointer; +} + +.loading-overlay { + display: none; + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(255, 255, 255, 0.8); + z-index: 1000; +} + +.loading-spinner { + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%, -50%); + border: 4px solid #f3f3f3; + border-top: 4px solid #3498db; + border-radius: 50%; + width: 40px; + height: 40px; + animation: spin 1s linear infinite; +} + +@keyframes spin { + 0% { transform: translate(-50%, -50%) rotate(0deg); } + 100% { transform: translate(-50%, -50%) rotate(360deg); } +} diff --git a/backend/apps/core/static/core/js/ferramentas.js 
b/backend/apps/core/static/core/js/ferramentas.js new file mode 100644 index 00000000..7ec98c49 --- /dev/null +++ b/backend/apps/core/static/core/js/ferramentas.js @@ -0,0 +1,57 @@ +const modal = document.getElementById('dadosModal'); +const closeButton = document.querySelector('.close-button'); +const dadosForm = document.getElementById('dadosForm'); +const loadingOverlay = document.getElementById('loadingOverlay'); + +function mostrarCarregamento() { +loadingOverlay.style.display = 'block'; +} + +// Função para esconder a animação de carregamento +function esconderCarregamento() { +loadingOverlay.style.display = 'none'; +} + +// Função para abrir a modal +function abrirModal() { +modal.style.display = 'block'; +} + +// Função para fechar a modal +function fecharModal() { +modal.style.display = 'none'; +} + +// Adicionar botão para abrir a modal +// Event listeners +closeButton.onclick = fecharModal; +window.onclick = function(event) { +if (event.target === modal) { + fecharModal(); +} +} + +function processar() { + +const formData = new FormData(dadosForm); +mostrarCarregamento(); + +fetch('/upload_columns/', { + method: 'POST', + body: formData, + headers: { + 'X-CSRFToken': document.querySelector('[name=csrfmiddlewaretoken]').value + } +}) +.then(response => response.json()) +.then(data => { + alert('Dados enviados com sucesso!' + data); + fecharModal(); +}) +.catch(error => { + alert('Erro ao enviar dados: ' + error); +}) +.finally(() => { +esconderCarregamento(); // Esconde o carregamento independente do resultado +}); +}; diff --git a/backend/templates/admin/change_form.html b/backend/templates/admin/change_form.html new file mode 100644 index 00000000..c559e2ea --- /dev/null +++ b/backend/templates/admin/change_form.html @@ -0,0 +1,48 @@ +{% extends "admin/change_form.html" %} +{% load static %} +{% block extra_actions %} +{% if opts.model_name == 'table' %} + + + +{% endif %} +{% endblock %} +{% block content %} +{{ block.super }} +{% if opts.model_name == 'table' %} + + + +{% endif %} + +{% endblock %} From 192d8ca239295260f8840c9f730e015be02c3d50 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 21 May 2025 17:03:47 -0300 Subject: [PATCH 060/181] the `chatbot` package was added to the api dependencies, so it doesn't need to be installed it directly. --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 56449182..77fcafe4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,7 @@ COPY nginx.conf /etc/nginx/nginx.conf WORKDIR /app COPY . . RUN test -d ./chatbot || (echo "ERROR: Git submodule 'chatbot' not found. Please run 'git submodule update --init --recursive'. See backend/README.md for more information." && exit 1) -RUN /env/bin/pip install --no-cache-dir . ./chatbot && rm nginx.conf +RUN /env/bin/pip install --no-cache-dir . 
&& rm nginx.conf # Prevents Python from writing .pyc files to disc # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE From 2201cec07614f9cf3afd56c04a2503bb475c4d3d Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 21 May 2025 17:08:50 -0300 Subject: [PATCH 061/181] add chatbot migrations --- .../migrations/0002_alter_feedback_comment.py | 18 ++++++++++++ ...sync_status_feedback_synced_at_and_more.py | 28 +++++++++++++++++++ .../0004_alter_feedback_sync_status.py | 18 ++++++++++++ 3 files changed, 64 insertions(+) create mode 100644 backend/apps/chatbot/migrations/0002_alter_feedback_comment.py create mode 100644 backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py create mode 100644 backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py diff --git a/backend/apps/chatbot/migrations/0002_alter_feedback_comment.py b/backend/apps/chatbot/migrations/0002_alter_feedback_comment.py new file mode 100644 index 00000000..0f79b964 --- /dev/null +++ b/backend/apps/chatbot/migrations/0002_alter_feedback_comment.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.20 on 2025-04-17 14:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('chatbot', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='feedback', + name='comment', + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py b/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py new file mode 100644 index 00000000..478de74c --- /dev/null +++ b/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 4.2.21 on 2025-05-08 17:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('chatbot', '0002_alter_feedback_comment'), + ] + + operations = [ + migrations.AddField( + model_name='feedback', + name='sync_status', + field=models.CharField(choices=[('pending', 'Pending'), ('success', 'Success'), ('failed', 'Failed')], default='pending'), + ), + migrations.AddField( + model_name='feedback', + name='synced_at', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='feedback', + name='updated_at', + field=models.DateTimeField(blank=True, null=True), + ), + ] diff --git a/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py b/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py new file mode 100644 index 00000000..52e83aca --- /dev/null +++ b/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.21 on 2025-05-13 20:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('chatbot', '0003_feedback_sync_status_feedback_synced_at_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='feedback', + name='sync_status', + field=models.TextField(choices=[('pending', 'Pending'), ('success', 'Success'), ('failed', 'Failed')], default='pending'), + ), + ] From 2a8550e6d96022d99bda80b3c0ce2d9d7efdaa16 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 21 May 2025 17:13:34 -0300 Subject: [PATCH 062/181] removed unused type --- backend/apps/chatbot/views.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/apps/chatbot/views.py 
b/backend/apps/chatbot/views.py index 43c70e4e..efae1b2c 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -24,8 +24,6 @@ from .models import * from .serializers import * -PydanticModel = TypeVar("PydanticModel", bound=pydantic.BaseModel) - ModelSerializer = TypeVar("ModelSerializer", bound=Serializer) @cache From c13c26252d6809b20ad477bbcaf0291943b5f2d2 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Wed, 21 May 2025 17:32:25 -0300 Subject: [PATCH 063/181] edited server starting scripts --- docker-compose.override.yaml | 2 +- start-dev.sh => start-server-dev.sh | 11 ++++++++--- start-server.sh | 6 +++++- 3 files changed, 14 insertions(+), 5 deletions(-) rename start-dev.sh => start-server-dev.sh (78%) diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 6306ba62..690e27c2 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -58,7 +58,7 @@ services: dockerfile: Dockerfile container_name: api env_file: [.env.docker] - command: ["/app/start-dev.sh"] + command: ["/app/start-server-dev.sh"] volumes: - .:/app - $HOME/.config/pydata:$HOME/.config/pydata diff --git a/start-dev.sh b/start-server-dev.sh similarity index 78% rename from start-dev.sh rename to start-server-dev.sh index 30c6ec44..97d92a16 100755 --- a/start-dev.sh +++ b/start-server-dev.sh @@ -1,17 +1,22 @@ #!/usr/bin/env bash -# start-server.sh +# start-server-dev.sh echo "> Making migrations" (cd /app; python manage.py makemigrations) + echo "> Migrating" (cd /app; python manage.py migrate) + echo "> Installing debugpy" pip install debugpy + echo "> Creating superuser" if [ -n "$DJANGO_SUPERUSER_USERNAME" ] && [ -n "$DJANGO_SUPERUSER_PASSWORD" ] ; then -(cd /app; python manage.py createsuperuser --no-input) + (cd /app; python manage.py createsuperuser --no-input) fi + echo "> Running Huey" (cd /app; python manage.py run_huey &) + +# Start server in development mode with django echo "> Running server in development mode" -# Start the server in development mode with django (cd /app; python -m debugpy --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000) diff --git a/start-server.sh b/start-server.sh index ab59c9b2..c8e91a03 100755 --- a/start-server.sh +++ b/start-server.sh @@ -2,13 +2,17 @@ # start-server.sh echo "> Making migrations" (cd /app; python manage.py makemigrations) + echo "> Migrating" (cd /app; python manage.py migrate) + echo "> Creating superuser" if [ -n "$DJANGO_SUPERUSER_USERNAME" ] && [ -n "$DJANGO_SUPERUSER_PASSWORD" ] ; then - (cd /app; python manage.py createsuperuser --no-input) + (cd /app; python manage.py createsuperuser --no-input) fi + echo "> Running Huey" (cd /app; python manage.py run_huey &) + echo "> Running Gunicorn" (cd /app; gunicorn backend.wsgi --user www-data --bind 0.0.0.0:8000 --workers 3 --timeout 180) & nginx -g "daemon off;" From 5a315b1a05a152a58fd707202832d78b5b9b4d99 Mon Sep 17 00:00:00 2001 From: Luiz Eduardo Date: Thu, 22 May 2025 03:45:36 -0300 Subject: [PATCH 064/181] feat: adicionando cached_property e Inline paginated (#814) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Atualmente temos problema de desempenho abrindo diversos Datasets e Tables. Com apenas um click conseguimos usar 100% do processamento do PODs travando sua operação. 
No PR estamos: - Otimizando muitas funções utilizando do `cached_property` - Adicionamos uma nova extensão `django-admin-inline-paginator-plus` --- backend/apps/api/v1/admin.py | 11 ++- backend/apps/api/v1/models.py | 156 ++++++++++++++++++++----------- backend/settings/base.py | 2 + poetry.lock | 170 ++++++++++++++++++++++++++++++---- pyproject.toml | 1 + 5 files changed, 265 insertions(+), 75 deletions(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index f9408318..56bc01b7 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -12,7 +12,9 @@ from django.shortcuts import render from django.urls import reverse from django.utils.html import format_html +from django_admin_inline_paginator_plus.admin import StackedInlinePaginated from django.utils.safestring import mark_safe + from modeltranslation.admin import TabbedTranslationAdmin, TranslationStackedInline from ordered_model.admin import OrderedInlineModelAdminMixin, OrderedStackedInline @@ -94,16 +96,20 @@ class MeasurementUnitInline(OrderedTranslatedInline): show_change_link = True -class ColumnInline(OrderedTranslatedInline): +class ColumnInline(OrderedStackedInline, StackedInlinePaginated): model = Column form = ColumnInlineForm extra = 0 show_change_link = True show_full_result_count = True + + per_page = 20 + fields = ColumnInlineForm.Meta.fields + [ "order", "move_up_down_links", ] + readonly_fields = [ "order", "move_up_down_links", @@ -194,10 +200,11 @@ def has_change_permission(self, request, obj=None): return False -class TableInline(OrderedTranslatedInline): +class TableInline(OrderedStackedInline, StackedInlinePaginated): model = Table form = TableInlineForm extra = 0 + per_page = 5 show_change_link = True fields = [ "order", diff --git a/backend/apps/api/v1/models.py b/backend/apps/api/v1/models.py index e7e382aa..778b43cb 100644 --- a/backend/apps/api/v1/models.py +++ b/backend/apps/api/v1/models.py @@ -8,6 +8,7 @@ from django.core.exceptions import ValidationError from django.db import models +from django.utils.functional import cached_property from ordered_model.models import OrderedModel from backend.apps.account.models import Account @@ -567,6 +568,24 @@ class Meta: verbose_name_plural = "Datasets" ordering = ["slug"] + @cached_property + def get_tables(self): + return self.tables.all() + + @cached_property + def get_raw_data_sources(self): + return self.raw_data_sources.all() + + @cached_property + def generate_resources(self): + resources = [ + *self.get_tables, + *self.get_raw_data_sources, + *self.information_requests.all(), + ] + + return resources + @property def full_slug(self): if self.organizations.first().area.slug != "unknown": @@ -584,11 +603,7 @@ def popularity(self): @property def temporal_coverage(self) -> dict: """Temporal coverage of all related entities""" - resources = [ - *self.tables.all(), - *self.raw_data_sources.all(), - *self.information_requests.all(), - ] + resources = self.generate_resources temporal_coverage = get_temporal_coverage(resources) if temporal_coverage["start"] and temporal_coverage["end"]: return f"{temporal_coverage['start']} - {temporal_coverage['end']}" @@ -601,52 +616,32 @@ def temporal_coverage(self) -> dict: @property def spatial_coverage(self) -> list[str]: """Union spatial coverage of all related resources""" - resources = [ - *self.tables.all(), - *self.raw_data_sources.all(), - *self.information_requests.all(), - ] + resources = self.generate_resources return sorted(list(get_spatial_coverage(resources))) @property def 
spatial_coverage_name_pt(self) -> list[str]: """Union spatial coverage of all related resources""" - resources = [ - *self.tables.all(), - *self.raw_data_sources.all(), - *self.information_requests.all(), - ] + resources = self.generate_resources return sorted(list(get_spatial_coverage_name(resources, locale="pt"))) @property def spatial_coverage_name_en(self) -> list[str]: """Union spatial coverage of all related resources""" - resources = [ - *self.tables.all(), - *self.raw_data_sources.all(), - *self.information_requests.all(), - ] + resources = self.generate_resources return sorted(list(get_spatial_coverage_name(resources, locale="en"))) @property def spatial_coverage_name_es(self) -> list[str]: """Union spatial coverage of all related resources""" - resources = [ - *self.tables.all(), - *self.raw_data_sources.all(), - *self.information_requests.all(), - ] + resources = self.generate_resources return sorted(list(get_spatial_coverage_name(resources, locale="es"))) @property def entities(self) -> list[dict]: """Entity of all related resources""" entities = [] - resources = [ - *self.tables.all(), - *self.raw_data_sources.all(), - *self.information_requests.all(), - ] + resources = self.generate_resources for resource in resources: for observation in resource.observation_levels.all(): entities.append(observation.entity.as_search_result) @@ -655,9 +650,13 @@ def entities(self) -> list[dict]: @property def contains_open_data(self): """Returns true if there are tables or columns with open coverages""" + + cached_tables = self.get_tables + open_data = False + tables = ( - self.tables.exclude(status__slug__in=["under_review", "excluded"]) + cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() ) @@ -670,9 +669,13 @@ def contains_open_data(self): @property def contains_closed_data(self): - """Returns true if there are tables or columns with closed coverages, or if the uncompressed file size is above 1 GB""" + """Returns true if there are tables or columns with closed coverages, + or if the uncompressed file size is above 1 GB""" + + cached_tables = self.get_tables + for table in ( - self.tables.exclude(status__slug__in=["under_review", "excluded"]) + cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() ): @@ -682,10 +685,12 @@ def contains_closed_data(self): @property def contains_direct_download_free(self): + cached_tables = self.get_tables + return len( [ table - for table in self.tables.exclude(status__slug__in=["under_review", "excluded"]) + for table in cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() if table.contains_direct_download_free @@ -694,10 +699,12 @@ def contains_direct_download_free(self): @property def contains_direct_download_paid(self): + cached_tables = self.get_tables + return len( [ table - for table in self.tables.exclude(status__slug__in=["under_review", "excluded"]) + for table in cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() if table.contains_direct_download_paid @@ -706,10 +713,12 @@ def contains_direct_download_paid(self): @property def contains_temporalcoverage_free(self): + cached_tables = self.get_tables + return len( [ table - for table in self.tables.exclude(status__slug__in=["under_review", "excluded"]) + for table in 
cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() if table.contains_temporalcoverage_free @@ -718,10 +727,12 @@ def contains_temporalcoverage_free(self): @property def contains_temporalcoverage_paid(self): + cached_tables = self.get_tables + return len( [ table - for table in self.tables.exclude(status__slug__in=["under_review", "excluded"]) + for table in cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() if table.contains_temporalcoverage_paid @@ -731,9 +742,12 @@ def contains_temporalcoverage_paid(self): @property def contains_tables(self): """Returns true if there are tables in the dataset""" + + cached_tables = self.get_tables + return ( len( - self.tables.exclude(status__slug__in=["under_review", "excluded"]) + cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() ) @@ -743,8 +757,15 @@ def contains_tables(self): @property def contains_raw_data_sources(self): """Returns true if there are raw data sources in the dataset""" + + cached_get_raw_data_sources = self.get_raw_data_sources + return ( - len(self.raw_data_sources.exclude(status__slug__in=["under_review", "excluded"]).all()) + len( + cached_get_raw_data_sources.exclude( + status__slug__in=["under_review", "excluded"] + ).all() + ) > 0 ) @@ -762,16 +783,20 @@ def contains_information_requests(self): @property def n_tables(self): + cached_tables = self.get_tables + return len( - self.tables.exclude(status__slug__in=["under_review", "excluded"]) + cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .all() ) @property def n_raw_data_sources(self): + cached_get_raw_data_sources = self.get_raw_data_sources + return len( - self.raw_data_sources.exclude(status__slug__in=["under_review", "excluded"]).all() + cached_get_raw_data_sources.exclude(status__slug__in=["under_review", "excluded"]).all() ) @property @@ -782,8 +807,10 @@ def n_information_requests(self): @property def first_table_id(self): + cached_tables = self.get_tables + if ( - resource := self.tables.exclude(status__slug__in=["under_review", "excluded"]) + resource := cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .order_by("order") .first() @@ -792,8 +819,10 @@ def first_table_id(self): @property def first_open_table_id(self): + cached_tables = self.get_tables + for resource in ( - self.tables.exclude(status__slug__in=["under_review", "excluded"]) + cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .order_by("order") .all() @@ -803,8 +832,10 @@ def first_open_table_id(self): @property def first_closed_table_id(self): + cached_tables = self.get_tables + for resource in ( - self.tables.exclude(status__slug__in=["under_review", "excluded"]) + cached_tables.exclude(status__slug__in=["under_review", "excluded"]) .exclude(slug__in=["dicionario", "dictionary"]) .order_by("order") .all() @@ -814,8 +845,10 @@ def first_closed_table_id(self): @property def first_raw_data_source_id(self): + cached_get_raw_data_sources = self.get_raw_data_sources + resource = ( - self.raw_data_sources + cached_get_raw_data_sources .exclude(status__slug__in=["under_review", "excluded"]) .order_by("order") .first() @@ -834,24 +867,37 @@ def first_information_request_id(self): @property def 
table_last_updated_at(self): + cached_tables = self.get_tables + updates = [ - u.last_updated_at for u in self.tables.exclude(status__slug__in=["under_review", "excluded"]).exclude(slug__in=["dicionario", "dictionary"]).all() + u.last_updated_at + for u in cached_tables.exclude( + status__slug__in=["under_review", "excluded"]).exclude( + slug__in=["dicionario", "dictionary"]).all() if u.last_updated_at ] # fmt: skip return max(updates) if updates else None @property def raw_data_source_last_polled_at(self): + cached_get_raw_data_sources = self.get_raw_data_sources + polls = [ - u.last_polled_at for u in self.raw_data_sources.exclude(status__slug__in=["under_review", "excluded"]).all() - if u.last_polled_at + u.get("last_polled_at") + for u in cached_get_raw_data_sources.exclude( + status__slug__in=["under_review", "excluded"]).all().values("last_polled_at") + if u.get("last_polled_at") ] # fmt: skip return max(polls) if polls else None @property def raw_data_source_last_updated_at(self): + cached_get_raw_data_sources = self.get_raw_data_sources + updates = [ - u.last_updated_at for u in self.raw_data_sources.exclude(status__slug__in=["under_review", "excluded"]).all() + u.last_updated_at + for u in cached_get_raw_data_sources.exclude( + status__slug__in=["under_review", "excluded"]).all() if u.last_updated_at ] # fmt: skip return max(updates) if updates else None @@ -1011,7 +1057,8 @@ class Table(BaseModel, OrderedModel): ) is_deprecated = models.BooleanField( default=False, - help_text="We stopped maintaining this table for some reason. Examples: raw data deprecated, new version elsewhere, etc.", + help_text="We stopped maintaining this table for some reason. " + "Examples: raw data deprecated, new version elsewhere, etc.", ) license = models.ForeignKey( "License", @@ -1123,7 +1170,9 @@ def gcs_slug(self): @property def partitions(self): """Returns a list of columns used to partition the table""" - partitions_list = [p.name for p in self.columns.all().filter(is_partition=True)] + partitions_list = [ + p.get("name") for p in self.columns.all().filter(is_partition=True).values("name") + ] return ", ".join(partitions_list) @property @@ -1211,7 +1260,7 @@ def neighbors(self) -> list[dict]: @property def last_updated_at(self): - updates = [u.latest for u in self.updates.all() if u.latest] + updates = [u.get("latest") for u in self.updates.all().values("latest") if u.get("latest")] return max(updates) if updates else None @property @@ -2312,7 +2361,8 @@ def get_full_temporal_coverage(resources: list) -> dict: def get_spatial_coverage(resources: list) -> list: - """Get spatial coverage of resources by returning unique area slugs, keeping only the highest level in each branch + """Get spatial coverage of resources by returning unique area slugs, + keeping only the highest level in each branch For example: - If areas = [br_mg_3100104, br_mg_3100104] -> returns [br_mg_3100104] diff --git a/backend/settings/base.py b/backend/settings/base.py index 72e25fbf..4695654a 100644 --- a/backend/settings/base.py +++ b/backend/settings/base.py @@ -44,6 +44,8 @@ "jazzmin", "modeltranslation", # + "django_admin_inline_paginator_plus", + # "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", diff --git a/poetry.lock b/poetry.lock index 27b60198..e378c154 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "aniso8601" @@ -6,6 +6,7 @@ version = "9.0.1" description = "A library for parsing ISO 8601 strings." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, @@ -20,6 +21,7 @@ version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, @@ -31,6 +33,7 @@ version = "3.7.2" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, @@ -48,6 +51,8 @@ version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_full_version <= \"3.11.2\"" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, @@ -59,6 +64,7 @@ version = "5.3.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, @@ -70,6 +76,7 @@ version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, @@ -81,6 +88,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -92,6 +100,7 @@ version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -191,6 +200,7 @@ version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -205,10 +215,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -216,6 +228,7 @@ version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, @@ -275,7 +288,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "db-dtypes" @@ -283,6 +296,7 @@ version = "1.2.0" description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "db-dtypes-1.2.0.tar.gz", hash = "sha256:3531bb1fb8b5fbab33121fe243ccc2ade16ab2524f4c113b05cc702a1908e6ea"}, {file = "db_dtypes-1.2.0-py2.py3-none-any.whl", hash = "sha256:6320bddd31d096447ef749224d64aab00972ed20e4392d86f7d8b81ad79f7ff0"}, @@ -300,6 +314,7 @@ version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, @@ -311,6 +326,7 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -322,6 +338,7 @@ version = "2.8.3" description = "Django + Stripe made easy" optional = false python-versions = ">=3.8.0,<4.0.0" +groups = ["main"] files = [ {file = 
"dj_stripe-2.8.3-py3-none-any.whl", hash = "sha256:4d442f43dd016ba89af6db3fd790673b61e9d36813f45e5964d471997e9039be"}, {file = "dj_stripe-2.8.3.tar.gz", hash = "sha256:f5205a3f2baa7cd5b858b2250200fb85e48616d9418218b2c9ab9b82747c31d4"}, @@ -341,6 +358,7 @@ version = "4.2.10" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Django-4.2.10-py3-none-any.whl", hash = "sha256:a2d4c4d4ea0b6f0895acde632071aff6400bfc331228fc978b05452a0ff3e9f1"}, {file = "Django-4.2.10.tar.gz", hash = "sha256:b1260ed381b10a11753c73444408e19869f3241fc45c985cd55a30177c789d13"}, @@ -355,12 +373,31 @@ tzdata = {version = "*", markers = "sys_platform == \"win32\""} argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] +[[package]] +name = "django-admin-inline-paginator-plus" +version = "0.1.4" +description = "The 'Django Admin Inline Paginator Plus' is simple way to paginate your inlines in Django admin" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "django_admin_inline_paginator_plus-0.1.4-py3-none-any.whl", hash = "sha256:a988bf248781e06c5b5daf88dd97f8b808c6842bd935babd987a8d17bfe099f3"}, + {file = "django_admin_inline_paginator_plus-0.1.4.tar.gz", hash = "sha256:0fb61009483d94a386a7c1c5e163551f511abc9ae41791d7af3da892a3139754"}, +] + +[package.dependencies] +django = "*" + +[package.extras] +dev = ["coverage", "mypy", "pytest", "ruff"] + [[package]] name = "django-cors-headers" version = "3.14.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django_cors_headers-3.14.0-py3-none-any.whl", hash = "sha256:684180013cc7277bdd8702b80a3c5a4b3fcae4abb2bf134dceb9f5dfe300228e"}, {file = "django_cors_headers-3.14.0.tar.gz", hash = "sha256:5fbd58a6fb4119d975754b2bc090f35ec160a8373f276612c675b00e8a138739"}, @@ -375,6 +412,7 @@ version = "3.2.3" description = "Extensions for Django" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, @@ -389,6 +427,7 @@ version = "22.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django-filter-22.1.tar.gz", hash = "sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5"}, {file = "django_filter-22.1-py3-none-any.whl", hash = "sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb"}, @@ -403,6 +442,7 @@ version = "0.3.4" description = "JSON Web Token for Django GraphQL." optional = false python-versions = ">=3.6,<4.0" +groups = ["main"] files = [ {file = "django-graphql-jwt-0.3.4.tar.gz", hash = "sha256:654808417a1fa97e4d489766b61046fa8006f58dfad1c44cc3a37a9e4929203b"}, {file = "django_graphql_jwt-0.3.4-py3-none-any.whl", hash = "sha256:fb20194bda649b2b1b49049ef84d0e957851df485fba7f1901aace54ca328063"}, @@ -420,6 +460,7 @@ version = "3.2.1" description = "Pluggable search for Django." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-haystack-3.2.1.tar.gz", hash = "sha256:97e3197aefc225fe405b6f17600a2534bf827cb4d6743130c20bc1a06f7293a4"}, ] @@ -437,6 +478,7 @@ version = "3.18.1" description = "Run checks on services like databases, queue servers, celery processes, etc." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django-health-check-3.18.1.tar.gz", hash = "sha256:44552d55ae8950c9548d3b90f9d9fd5570b57446a19b2a8e674c82f993cb7a2c"}, {file = "django_health_check-3.18.1-py2.py3-none-any.whl", hash = "sha256:2c89a326cd79830e2fc6808823a9e7e874ab23f7aef3ff2c4d1194c998e1dca1"}, @@ -455,6 +497,7 @@ version = "2.6.0" description = "Drop-in theme for django admin, that utilises AdminLTE 3 & Bootstrap 4 to make yo' admin look jazzy" optional = false python-versions = ">=3.6.2" +groups = ["main"] files = [ {file = "django_jazzmin-2.6.0-py3-none-any.whl", hash = "sha256:fb554c2d564649c65243b13385121fdbdda58521f49544f9d7cb9c414a4908d4"}, {file = "django_jazzmin-2.6.0.tar.gz", hash = "sha256:5bb07055cf19183030724f976904fd8b6337559727959340a43832fab0531812"}, @@ -469,6 +512,7 @@ version = "0.18.11" description = "Translates Django models using a registration approach." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-modeltranslation-0.18.11.tar.gz", hash = "sha256:a6e2c459e3b31852287d030bc6e29fa28576db97455dccd399fe08ac8e9223b9"}, {file = "django_modeltranslation-0.18.11-py3-none-any.whl", hash = "sha256:81b68e4dc806a3b779ac88babe1cbd99d5318d374a43b3737a65fb0f4c1cffe8"}, @@ -484,6 +528,7 @@ version = "3.7.4" description = "Allows Django models to be ordered and provides a simple admin interface for reordering them." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-ordered-model-3.7.4.tar.gz", hash = "sha256:f258b9762525c00a53009e82f8b8bf2a3aa315e8b453e281e8fdbbfe2b8cb3ba"}, {file = "django_ordered_model-3.7.4-py3-none-any.whl", hash = "sha256:dfcd3183fe0749dad1c9971cba1d6240ce7328742a30ddc92feca41107bb241d"}, @@ -495,6 +540,7 @@ version = "1.14.2" description = "Support for many storage backends in Django" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django-storages-1.14.2.tar.gz", hash = "sha256:51b36af28cc5813b98d5f3dfe7459af638d84428c8df4a03990c7d74d1bea4e5"}, {file = "django_storages-1.14.2-py3-none-any.whl", hash = "sha256:1db759346b52ada6c2efd9f23d8241ecf518813eb31db9e2589207174f58f6ad"}, @@ -519,6 +565,7 @@ version = "7.17.9" description = "Python client for Elasticsearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +groups = ["main"] files = [ {file = "elasticsearch-7.17.9-py2.py3-none-any.whl", hash = "sha256:0e2454645dc00517dee4c6de3863411a9c5f1955d013c5fefa29123dadc92f98"}, {file = "elasticsearch-7.17.9.tar.gz", hash = "sha256:66c4ece2adfe7cc120e2b6a6798a1fd5c777aecf82eec39bb95cef7cfc7ea2b3"}, @@ -540,6 +587,8 @@ version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["test"] +markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, @@ -554,6 +603,7 @@ version = "19.13.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Faker-19.13.0-py3-none-any.whl", hash = "sha256:da880a76322db7a879c848a0771e129338e0a680a9f695fd9a3e7a6ac82b45e1"}, {file = "Faker-19.13.0.tar.gz", hash = "sha256:14ccb0aec342d33aa3889a864a56e5b3c2d56bce1b89f9189f4fbc128b9afc1e"}, @@ -568,6 +618,7 @@ version = "3.13.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, @@ -576,7 +627,7 @@ files = [ [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] +typing = ["typing-extensions (>=4.8) ; python_version < \"3.11\""] [[package]] name = "google-api-core" @@ -584,6 +635,7 @@ version = "2.17.0" description = "Google API client core library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-api-core-2.17.0.tar.gz", hash = "sha256:de7ef0450faec7c75e0aea313f29ac870fdc44cfaec9d6499a9a17305980ef66"}, {file = "google_api_core-2.17.0-py3-none-any.whl", hash = "sha256:08ed79ed8e93e329de5e3e7452746b734e6bf8438d8d64dd3319d21d3164890c"}, @@ -593,18 +645,18 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] @@ -614,6 +666,7 @@ version = "2.117.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-api-python-client-2.117.0.tar.gz", hash = "sha256:b38cd1477ee3c341a0d2f7427326499b416f36c44e9b20d1da229df8be0c596e"}, {file = "google_api_python_client-2.117.0-py2.py3-none-any.whl", hash = "sha256:bd6d393d0eaa7ea1fa13aefb44be787d1ebdc068ab8255f1c3f1d8b486f46afd"}, @@ -632,6 +685,7 @@ version = "2.27.0" description = "Google Authentication Library" 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, @@ -655,6 +709,7 @@ version = "0.2.0" description = "Google Authentication Library: httplib2 transport" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, @@ -670,6 +725,7 @@ version = "1.2.0" description = "Google Authentication Library" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"}, {file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"}, @@ -688,6 +744,7 @@ version = "3.17.2" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-bigquery-3.17.2.tar.gz", hash = "sha256:6e1cf669a40e567ab3289c7b5f2056363da9fcb85d9a4736ee90240d4a7d84ea"}, {file = "google_cloud_bigquery-3.17.2-py2.py3-none-any.whl", hash = "sha256:cdadf5283dca55a1a350bacf8c8a7466169d3cf46c5a0a3abc5e9aa0b0a51dee"}, @@ -702,14 +759,14 @@ python-dateutil = ">=2.7.2,<3.0dev" requests = ">=2.21.0,<3.0.0dev" [package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"] geopandas 
= ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] @@ -718,6 +775,7 @@ version = "2.24.0" description = "Google Cloud Bigquery Storage API client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-bigquery-storage-2.24.0.tar.gz", hash = "sha256:b4af5b9aacd8396b8407d1b877601a376d8eea6d192823a8a7881bd2fdc076ce"}, {file = "google_cloud_bigquery_storage-2.24.0-py2.py3-none-any.whl", hash = "sha256:7981eb2758cba56603058d11bb1eeeebf2e1c18097a7118a894510a16e02be52"}, @@ -726,14 +784,14 @@ files = [ [package.dependencies] google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" [package.extras] fastavro = ["fastavro (>=0.21.2)"] -pandas = ["importlib-metadata (>=1.0.0)", "pandas (>=0.21.1)"] +pandas = ["importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=0.21.1)"] pyarrow = ["pyarrow (>=0.15.0)"] [[package]] @@ -742,6 +800,7 @@ version = "2.4.1" description = "Google Cloud API client core library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, @@ -760,6 +819,7 @@ version = "2.14.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, @@ -782,6 +842,7 @@ version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, @@ -862,6 +923,7 @@ version = "2.7.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" +groups = ["main"] files = [ {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, {file = 
"google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, @@ -880,6 +942,7 @@ version = "1.62.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, @@ -897,6 +960,7 @@ version = "3.2.1" description = "GraphQL Framework for Python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "graphene-3.2.1-py2.py3-none-any.whl", hash = "sha256:2ef689f514ba9e65e88961798cf4c637ca580e541168f9aee2ffbe21fd46f388"}, {file = "graphene-3.2.1.tar.gz", hash = "sha256:722243a9da2caeab703b1af9ec0deec602589c97035f86c486106a52d0c67082"}, @@ -917,6 +981,7 @@ version = "3.0.0" description = "Graphene Django integration" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "graphene-django-3.0.0.tar.gz", hash = "sha256:018a8dc4736d99b5bb4a15d7fd0b46c98010e9201cb52a290f6d1f16ae6fefda"}, {file = "graphene_django-3.0.0-py2.py3-none-any.whl", hash = "sha256:9fa531d319d5c8f9e08274628f547574ee684e74dddd1c969abf38142bc32df2"}, @@ -941,6 +1006,7 @@ version = "1.3.0" description = "Lib for adding file upload functionality to GraphQL mutations in Graphene Django and Flask-Graphql" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "graphene_file_upload-1.3.0-py3-none-any.whl", hash = "sha256:5afe50f409f50e3d198fd92c883d98d868e6c6aaadf5df3a3f4d88ecad90ed97"}, {file = "graphene_file_upload-1.3.0.tar.gz", hash = "sha256:6898480b0556826472c80971032917c01968ade5800d84054008fe598795b063"}, @@ -961,6 +1027,7 @@ version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, @@ -972,6 +1039,7 @@ version = "3.2.0" description = "Relay library for graphql-core" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c"}, {file = "graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5"}, @@ -986,6 +1054,7 @@ version = "1.60.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, @@ -1052,6 +1121,7 @@ version = "1.60.1" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "grpcio-status-1.60.1.tar.gz", hash = "sha256:61b5aab8989498e8aa142c20b88829ea5d90d18c18c853b9f9e6d407d37bf8b4"}, {file = "grpcio_status-1.60.1-py3-none-any.whl", hash = "sha256:3034fdb239185b6e0f3169d08c268c4507481e4b8a434c21311a03d9eb5889a0"}, @@ -1068,6 +1138,7 @@ version = "20.1.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, @@ -1088,6 +1159,7 @@ version = "0.22.0" description = "A comprehensive HTTP client library." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, @@ -1102,6 +1174,7 @@ version = "2.5.0" description = "huey, a little task queue" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "huey-2.5.0.tar.gz", hash = "sha256:2ffb52fb5c46a1b0d53c79d59df3622312b27e2ab68d81a580985a8ea4ca3480"}, ] @@ -1116,6 +1189,7 @@ version = "2.5.34" description = "File identification library for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, @@ -1130,6 +1204,7 @@ version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, @@ -1141,6 +1216,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1152,6 +1228,7 @@ version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, @@ -1162,7 +1239,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] +dev = ["Sphinx (==7.2.5) ; python_version >= \"3.9\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.2.2) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "mypy (==v1.5.1) ; python_version >= \"3.8\"", "pre-commit (==3.4.0) ; python_version >= \"3.8\"", "pytest (==6.1.2) ; 
python_version < \"3.8\"", "pytest (==7.4.0) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==4.1.0) ; python_version >= \"3.8\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.0.0) ; python_version >= \"3.8\"", "sphinx-autobuild (==2021.3.14) ; python_version >= \"3.9\"", "sphinx-rtd-theme (==1.3.0) ; python_version >= \"3.9\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.11.0) ; python_version >= \"3.8\""] [[package]] name = "maison" @@ -1170,6 +1247,7 @@ version = "1.4.3" description = "Read settings from config files" optional = false python-versions = ">=3.7.1,<4.0.0" +groups = ["dev"] files = [ {file = "maison-1.4.3-py3-none-any.whl", hash = "sha256:a36208d0befb3bd8aa3b002ac198ce6f6e61efe568b195132640f4032eff46ac"}, {file = "maison-1.4.3.tar.gz", hash = "sha256:766222ce82ae27138256c4af9d0bc6b3226288349601e095dcc567884cf0ce36"}, @@ -1186,6 +1264,7 @@ version = "1.8.0" description = "Node.js virtual environment builder" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +groups = ["dev"] files = [ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, @@ -1200,6 +1279,7 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1245,6 +1325,7 @@ version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -1261,6 +1342,7 @@ version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, @@ -1272,6 +1354,7 @@ version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, @@ -1306,9 +1389,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4,<2", markers 
= "python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1344,6 +1427,7 @@ version = "0.19.2" description = "Google BigQuery connector for pandas" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pandas-gbq-0.19.2.tar.gz", hash = "sha256:b0f7fa84a2be0fe767e33a008ca7e4ad9a9e3ac67255fd0a41fc19b503138447"}, {file = "pandas_gbq-0.19.2-py2.py3-none-any.whl", hash = "sha256:0ef8da3e4088053a2bea069ed688992a44b52af67dadb97eee494b32a2147563"}, @@ -1371,6 +1455,7 @@ version = "9.5.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, @@ -1450,6 +1535,7 @@ version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, @@ -1465,6 +1551,7 @@ version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, @@ -1480,6 +1567,7 @@ version = "3.6.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pre_commit-3.6.1-py2.py3-none-any.whl", hash = "sha256:9fe989afcf095d2c4796ce7c553cf28d4d4a9b9346de3cda079bcf40748454a4"}, {file = "pre_commit-3.6.1.tar.gz", hash = "sha256:c90961d8aa706f75d60935aba09469a6b0bcb8345f127c3fbee4bdc5f114cf4b"}, @@ -1498,6 +1586,7 @@ version = "2.3" description = "Promises/A+ implementation for Python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, ] @@ -1514,6 +1603,7 @@ version = "1.23.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, @@ -1531,6 +1621,7 @@ version = "4.25.2" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, @@ -1551,6 +1642,7 @@ version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, @@ -1632,6 +1724,7 @@ version = "15.0.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, @@ -1680,6 +1773,7 @@ version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, @@ -1691,6 +1785,7 @@ version = "0.3.0" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, @@ -1705,6 +1800,7 @@ version = "2.6.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, @@ -1724,6 +1820,7 @@ version = "2.16.2" description = "" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, @@ -1815,6 +1912,7 @@ version = "1.8.2" description = "PyData helpers for authenticating to Google APIs" optional = 
false python-versions = "*" +groups = ["main"] files = [ {file = "pydata-google-auth-1.8.2.tar.gz", hash = "sha256:547b6c0fbea657dcecd50887c5db8640ebec062a59a2b88e8ff8e53a04818303"}, {file = "pydata_google_auth-1.8.2-py2.py3-none-any.whl", hash = "sha256:a9dce59af4a170ea60c4b2ebbc83ee1f74d34255a4f97b2469ae9a4a0dc98e99"}, @@ -1831,6 +1929,7 @@ version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, @@ -1848,6 +1947,7 @@ version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" +groups = ["main"] files = [ {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, @@ -1862,6 +1962,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -1884,6 +1985,7 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -1902,6 +2004,7 @@ version = "4.8.0" description = "A Django plugin for pytest." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-django-4.8.0.tar.gz", hash = "sha256:5d054fe011c56f3b10f978f41a8efb2e5adfc7e680ef36fb571ada1f24779d90"}, {file = "pytest_django-4.8.0-py3-none-any.whl", hash = "sha256:ca1ddd1e0e4c227cf9e3e40a6afc6d106b3e70868fd2ac5798a22501271cd0c7"}, @@ -1920,6 +2023,7 @@ version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1934,6 +2038,7 @@ version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, @@ -1945,6 +2050,7 @@ version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, @@ -2005,6 +2111,7 @@ version = "5.0.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, @@ -2023,6 +2130,7 @@ version = "2.31.0" description = "Python HTTP for Humans." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, @@ -2044,6 +2152,7 @@ version = "1.3.1" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, @@ -2062,6 +2171,7 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -2076,6 +2186,7 @@ version = "0.2.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"}, {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"}, @@ -2102,6 +2213,7 @@ version = "0.91.0" description = "ruyaml is a fork of ruamel.yaml" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "ruyaml-0.91.0-py3-none-any.whl", hash = "sha256:50e0ee3389c77ad340e209472e0effd41ae0275246df00cdad0a067532171755"}, {file = "ruyaml-0.91.0.tar.gz", hash = "sha256:6ce9de9f4d082d696d3bde264664d1bcdca8f5a9dff9d1a1f1a127969ab871ab"}, @@ -2120,6 +2232,7 @@ version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, @@ -2127,7 +2240,7 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov ; platform_python_implementation != \"PyPy\"", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff ; sys_platform != \"cygwin\"", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2136,6 +2249,7 @@ version = "0.20.0" description = "sqlfmt formats your dbt SQL files so you don't have to." 
optional = false python-versions = ">=3.8,<4.0" +groups = ["dev"] files = [ {file = "shandy_sqlfmt-0.20.0-py3-none-any.whl", hash = "sha256:0a8fd640e7d5fdb60b97faef9485e7389b94406f36501f7dc84c86577283f282"}, {file = "shandy_sqlfmt-0.20.0.tar.gz", hash = "sha256:2c6a8a39b03b1dac761239a08e66fbde849eed739528c2e80aeebf5164b45f6b"}, @@ -2157,6 +2271,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2168,6 +2283,7 @@ version = "0.4.4" description = "A non-validating SQL parser." optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, @@ -2184,6 +2300,7 @@ version = "4.2.0" description = "Python bindings for the Stripe API" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "stripe-4.2.0-py2.py3-none-any.whl", hash = "sha256:8ce03bfc099465740e33890000c454e79316c8730e45ad1efbaec3d52a019d05"}, {file = "stripe-4.2.0.tar.gz", hash = "sha256:f0134704bd4e9410fae25034836dc6f5849d92c0f9083d58d43e01b3e631ac4c"}, @@ -2198,6 +2315,7 @@ version = "1.3" description = "The most basic Text::Unidecode port" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, @@ -2209,6 +2327,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2220,6 +2339,8 @@ version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" +groups = ["dev", "test"] +markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -2231,6 +2352,7 @@ version = "4.66.4" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, @@ -2251,6 +2373,7 @@ version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = 
"sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, @@ -2262,6 +2385,7 @@ version = "2023.4" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] files = [ {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, @@ -2273,6 +2397,7 @@ version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, @@ -2284,14 +2409,15 @@ version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["main"] files = [ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -2300,6 +2426,7 @@ version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, @@ -2312,7 +2439,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer 
(>=0.4.8) ; platform_python_implementation == \"PyPy\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "win32-setctime" @@ -2320,13 +2447,15 @@ version = "1.1.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, ] [package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [[package]] name = "yamlfix" @@ -2334,6 +2463,7 @@ version = "1.16.0" description = "A simple opionated yaml formatter that keeps your comments!" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "yamlfix-1.16.0-py3-none-any.whl", hash = "sha256:d92bf8a6d5b6f186bd9d643d633549a1c2424555cb8d176a5d38bce3e678b2b0"}, {file = "yamlfix-1.16.0.tar.gz", hash = "sha256:72f7990e5b2b4459ef3249df4724dacbd85ce7b87f4ea3503d8a72c48574cc32"}, @@ -2345,6 +2475,6 @@ maison = ">=1.4.0" ruyaml = ">=0.91.0" [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "5973fbf28a9bc43c2038ff04f01624241ddb0b56a704b6d0ce8426370c308fc5" +content-hash = "599b8436b2f842c2da9b8ae64ae5202bbad2de108a14631a47da28368969587a" diff --git a/pyproject.toml b/pyproject.toml index b3622a2b..7bdab546 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ dj-stripe = "^2.8.3" pydantic = "^2.5.3" requests = "^2.31.0" tqdm = "^4.66.4" +django-admin-inline-paginator-plus = "^0.1.4" [tool.poetry.group.dev.dependencies] pre-commit = "^3.3.3" From 69b1fe6fd2bc039626eb328691e839ed553f80f3 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 10:28:59 -0300 Subject: [PATCH 065/181] update compose file --- docker-compose.yaml | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index e95a66e9..8360f37d 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -43,6 +43,15 @@ services: retries: 5 start_period: 1m restart: unless-stopped + vector-database: + image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 + ports: + - 8001:8000 + volumes: + - chroma_data:/chroma/chroma + healthcheck: + test: curl -f http://localhost:8000/api/v2/heartbeat || exit 1 + restart: unless-stopped api: build: context: . 
@@ -50,7 +59,8 @@ services: container_name: api env_file: [.env.docker] volumes: - - .:/app + - .:/app + - $HOME/.config/pydata:$HOME/.config/pydata ports: - "8000:8000" # Porta da api - "5678:5678" # Porta de debug @@ -61,6 +71,8 @@ services: condition: service_healthy database: condition: service_healthy + vector-database: + condition: service_healthy healthcheck: test: [CMD, curl, -f, http://localhost/healthcheck/] interval: 1m @@ -68,6 +80,8 @@ services: retries: 5 start_period: 30s restart: unless-stopped + volumes: esdata: - pgdata: \ No newline at end of file + pgdata: + chroma_data: From 098c6e6b13ae11c050a8fef7a6f1e9e2cee2b05e Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 11:23:27 -0300 Subject: [PATCH 066/181] updated chatbot admin models --- backend/apps/chatbot/admin.py | 35 +++++++++++++---------------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/backend/apps/chatbot/admin.py b/backend/apps/chatbot/admin.py index 37decafd..479ed253 100644 --- a/backend/apps/chatbot/admin.py +++ b/backend/apps/chatbot/admin.py @@ -5,44 +5,35 @@ class ThreadAdmin(admin.ModelAdmin): - list_display = [ + list_display = [field.name for field in Thread._meta.fields] + readonly_fields = list_display + search_fields = [ "id", - "created_at", + "account__email", ] + ordering = ["-created_at"] class MessagePairAdmin(admin.ModelAdmin): - list_display = [ - "id", - "user_message", - "assistant_message", - "created_at", - ] + list_display = [field.name for field in MessagePair._meta.fields] + readonly_fields = list_display search_fields = [ + "id", + "thread__id", "user_message", "assistant_message", ] - readonly_fields = [ - "created_at", - ] ordering = ["-created_at"] class FeedbackAdmin(admin.ModelAdmin): - list_display = [ - "message_pair_id", - "rating", - "created_at", - "updated_at", - ] + list_display = [field.name for field in Feedback._meta.fields] + readonly_fields = list_display search_fields = [ - ] - readonly_fields = [ - "created_at", - "updated_at", + "id", + "message_pair__id", ] ordering = ["-created_at"] - admin.site.register(Thread, ThreadAdmin) admin.site.register(MessagePair, MessagePairAdmin) admin.site.register(Feedback, FeedbackAdmin) From 47ddd5a894957a827649c9bc0672464f336aecfe Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 11:30:47 -0300 Subject: [PATCH 067/181] update account admin model to show the `has_chatbot_access` flag --- backend/apps/account/admin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/apps/account/admin.py b/backend/apps/account/admin.py index fb08210a..a0c79058 100644 --- a/backend/apps/account/admin.py +++ b/backend/apps/account/admin.py @@ -204,6 +204,7 @@ class AccountAdmin(BaseAccountAdmin): "created_at", "is_admin", "is_subscriber", + "has_chatbot_access", ) list_filter = ( SuperUserListFilter, @@ -264,6 +265,7 @@ class AccountAdmin(BaseAccountAdmin): "is_active", "is_admin", "is_superuser", + "has_chatbot_access", "staff_groups", ) }, From 271ba8b5cad995abe858fbfa1470faa56bd55672 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 15:45:26 -0300 Subject: [PATCH 068/181] remove unused pydantic import --- backend/apps/chatbot/views.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index efae1b2c..83ca790f 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -5,7 +5,6 @@ from typing import Type, TypeVar import chromadb -import pydantic from django.http import 
HttpResponse, JsonResponse from langchain_chroma import Chroma from langchain_openai import OpenAIEmbeddings From 7aa110162092f9e865380ec5600b9c848a8a08b5 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 15:46:01 -0300 Subject: [PATCH 069/181] cloning submodules in ci workflow --- .github/workflows/ci-python.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci-python.yaml b/.github/workflows/ci-python.yaml index 52fd1059..f87c7011 100644 --- a/.github/workflows/ci-python.yaml +++ b/.github/workflows/ci-python.yaml @@ -18,6 +18,8 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 + with: + submodules: recursive - name: Set up poetry run: pipx install poetry - name: Set up python From fa82195cb25724cda12154d8883cce271ef01e65 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 15:46:47 -0300 Subject: [PATCH 070/181] update formatting --- .github/workflows/deploy-dev.yaml | 4 +--- .github/workflows/deploy-prod.yaml | 4 +--- .github/workflows/deploy-staging.yaml | 4 +--- 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index 06f3b6e4..fe5846ee 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -8,9 +8,7 @@ on: jobs: deploy-dev: runs-on: ubuntu-latest - if: | - ${{ github.event_name == 'workflow_dispatch' }} - || ${{ github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event_name == 'workflow_dispatch' }} || ${{ github.event.workflow_run.conclusion == 'success' }} environment: name: development url: https://api.development.basedosdados.org diff --git a/.github/workflows/deploy-prod.yaml b/.github/workflows/deploy-prod.yaml index ca5c9c19..0dff3714 100644 --- a/.github/workflows/deploy-prod.yaml +++ b/.github/workflows/deploy-prod.yaml @@ -8,9 +8,7 @@ on: jobs: deploy-prod: runs-on: ubuntu-latest - if: | - ${{ github.event_name == 'workflow_dispatch' }} - || ${{ github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event_name == 'workflow_dispatch' }} || ${{ github.event.workflow_run.conclusion == 'success' }} environment: name: production url: https://backend.basedosdados.org diff --git a/.github/workflows/deploy-staging.yaml b/.github/workflows/deploy-staging.yaml index 5a4e1cbd..87ccb4dd 100644 --- a/.github/workflows/deploy-staging.yaml +++ b/.github/workflows/deploy-staging.yaml @@ -8,9 +8,7 @@ on: jobs: deploy-staging: runs-on: ubuntu-latest - if: | - ${{ github.event_name == 'workflow_dispatch' }} - || ${{ github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event_name == 'workflow_dispatch' }} || ${{ github.event.workflow_run.conclusion == 'success' }} environment: name: staging url: https://staging.backend.basedosdados.org From 5f4c1e478fb57b7adbf900be20109b1223b22a20 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 22 May 2025 15:48:32 -0300 Subject: [PATCH 071/181] add missing `Account` migrations --- ...ription_en_role_description_es_and_more.py | 73 +++++++++++++++++++ .../0025_account_has_access_to_chatbot.py | 18 +++++ ...s_to_chatbot_account_has_chatbot_access.py | 18 +++++ 3 files changed, 109 insertions(+) create mode 100644 backend/apps/account/migrations/0024_role_description_en_role_description_es_and_more.py create mode 100644 backend/apps/account/migrations/0025_account_has_access_to_chatbot.py create mode 100644 backend/apps/account/migrations/0026_rename_has_access_to_chatbot_account_has_chatbot_access.py diff --git 
a/backend/apps/account/migrations/0024_role_description_en_role_description_es_and_more.py b/backend/apps/account/migrations/0024_role_description_en_role_description_es_and_more.py new file mode 100644 index 00000000..ad8e7ae4 --- /dev/null +++ b/backend/apps/account/migrations/0024_role_description_en_role_description_es_and_more.py @@ -0,0 +1,73 @@ +# Generated by Django 4.2.20 on 2025-04-14 17:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('account', '0023_alter_career_role_old_alter_career_team_old'), + ] + + operations = [ + migrations.AddField( + model_name='role', + name='description_en', + field=models.TextField(blank=True, null=True, verbose_name='Description'), + ), + migrations.AddField( + model_name='role', + name='description_es', + field=models.TextField(blank=True, null=True, verbose_name='Description'), + ), + migrations.AddField( + model_name='role', + name='description_pt', + field=models.TextField(blank=True, null=True, verbose_name='Description'), + ), + migrations.AddField( + model_name='role', + name='name_en', + field=models.CharField(max_length=100, null=True, unique=True, verbose_name='Name'), + ), + migrations.AddField( + model_name='role', + name='name_es', + field=models.CharField(max_length=100, null=True, unique=True, verbose_name='Name'), + ), + migrations.AddField( + model_name='role', + name='name_pt', + field=models.CharField(max_length=100, null=True, unique=True, verbose_name='Name'), + ), + migrations.AddField( + model_name='team', + name='description_en', + field=models.TextField(blank=True, null=True, verbose_name='Description'), + ), + migrations.AddField( + model_name='team', + name='description_es', + field=models.TextField(blank=True, null=True, verbose_name='Description'), + ), + migrations.AddField( + model_name='team', + name='description_pt', + field=models.TextField(blank=True, null=True, verbose_name='Description'), + ), + migrations.AddField( + model_name='team', + name='name_en', + field=models.CharField(max_length=100, null=True, unique=True, verbose_name='Name'), + ), + migrations.AddField( + model_name='team', + name='name_es', + field=models.CharField(max_length=100, null=True, unique=True, verbose_name='Name'), + ), + migrations.AddField( + model_name='team', + name='name_pt', + field=models.CharField(max_length=100, null=True, unique=True, verbose_name='Name'), + ), + ] diff --git a/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py b/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py new file mode 100644 index 00000000..4c0f5dd7 --- /dev/null +++ b/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.21 on 2025-05-08 14:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('account', '0024_role_description_en_role_description_es_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='account', + name='has_access_to_chatbot', + field=models.BooleanField(default=False, help_text='Indica se o usuário tem acesso ao chatbot', verbose_name='Tem acesso ao chatbot'), + ), + ] diff --git a/backend/apps/account/migrations/0026_rename_has_access_to_chatbot_account_has_chatbot_access.py b/backend/apps/account/migrations/0026_rename_has_access_to_chatbot_account_has_chatbot_access.py new file mode 100644 index 00000000..0153fc73 --- /dev/null +++ 
b/backend/apps/account/migrations/0026_rename_has_access_to_chatbot_account_has_chatbot_access.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.21 on 2025-05-08 14:42 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('account', '0025_account_has_access_to_chatbot'), + ] + + operations = [ + migrations.RenameField( + model_name='account', + old_name='has_access_to_chatbot', + new_name='has_chatbot_access', + ), + ] From 96bc1c3f3c0d4ddf07444db8e494e63419c7bf71 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 23 May 2025 09:26:23 -0300 Subject: [PATCH 072/181] we don't need to checkout submodules in the deploy step because the docker image is already built and we're only using the helm templates --- .github/workflows/deploy-dev.yaml | 1 - .github/workflows/deploy-prod.yaml | 1 - .github/workflows/deploy-staging.yaml | 1 - 3 files changed, 3 deletions(-) diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index fe5846ee..caa6d906 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -17,7 +17,6 @@ jobs: uses: actions/checkout@v4 with: ref: dev - submodules: recursive - name: Import secrets from Vault id: import_secrets uses: hashicorp/vault-action@v2.8.0 diff --git a/.github/workflows/deploy-prod.yaml b/.github/workflows/deploy-prod.yaml index 0dff3714..23489779 100644 --- a/.github/workflows/deploy-prod.yaml +++ b/.github/workflows/deploy-prod.yaml @@ -17,7 +17,6 @@ jobs: uses: actions/checkout@v4 with: ref: main - submodules: recursive - name: Import secrets from Vault id: import_secrets uses: hashicorp/vault-action@v2.8.0 diff --git a/.github/workflows/deploy-staging.yaml b/.github/workflows/deploy-staging.yaml index 87ccb4dd..bfc968bb 100644 --- a/.github/workflows/deploy-staging.yaml +++ b/.github/workflows/deploy-staging.yaml @@ -17,7 +17,6 @@ jobs: uses: actions/checkout@v4 with: ref: staging - submodules: recursive - name: Import secrets from Vault id: import_secrets uses: hashicorp/vault-action@v2.8.0 From 57939a25afd49e657fb35cd3ba940520f2d37dc1 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 23 May 2025 09:27:24 -0300 Subject: [PATCH 073/181] we need to use a deploy key to checkout the `chatbot` submodule because it's private --- .github/workflows/ci-python.yaml | 5 +++++ .github/workflows/release-dev.yaml | 5 +++++ .github/workflows/release-prod.yaml | 5 +++++ .github/workflows/release-staging.yaml | 5 +++++ 4 files changed, 20 insertions(+) diff --git a/.github/workflows/ci-python.yaml b/.github/workflows/ci-python.yaml index f87c7011..2951370d 100644 --- a/.github/workflows/ci-python.yaml +++ b/.github/workflows/ci-python.yaml @@ -16,6 +16,11 @@ jobs: name: Test python runs-on: ubuntu-latest steps: + # this step is needed because the chatbot submodule is private + - name: Set up deploy keys for submodules + uses: webfactory/ssh-agent@v0.9.1 + with: + ssh-private-key: ${{ secrets.CHATBOT_SUBMODULE_DEPLOY_KEY }} - name: Checkout uses: actions/checkout@v4 with: diff --git a/.github/workflows/release-dev.yaml b/.github/workflows/release-dev.yaml index 5aec0861..9cce6d45 100644 --- a/.github/workflows/release-dev.yaml +++ b/.github/workflows/release-dev.yaml @@ -9,6 +9,11 @@ jobs: name: Release Image runs-on: ubuntu-latest steps: + # this step is needed because the chatbot submodule is private + - name: Set up deploy keys for submodules + uses: webfactory/ssh-agent@v0.9.1 + with: + ssh-private-key: ${{ secrets.CHATBOT_SUBMODULE_DEPLOY_KEY }} - name: 
Checkout uses: actions/checkout@v4 with: diff --git a/.github/workflows/release-prod.yaml b/.github/workflows/release-prod.yaml index d6e90e08..7bc54289 100644 --- a/.github/workflows/release-prod.yaml +++ b/.github/workflows/release-prod.yaml @@ -9,6 +9,11 @@ jobs: name: Release Image runs-on: ubuntu-latest steps: + # this step is needed because the chatbot submodule is private + - name: Set up deploy keys for submodules + uses: webfactory/ssh-agent@v0.9.1 + with: + ssh-private-key: ${{ secrets.CHATBOT_SUBMODULE_DEPLOY_KEY }} - name: Checkout uses: actions/checkout@v4 with: diff --git a/.github/workflows/release-staging.yaml b/.github/workflows/release-staging.yaml index a6a23f63..5912855b 100644 --- a/.github/workflows/release-staging.yaml +++ b/.github/workflows/release-staging.yaml @@ -9,6 +9,11 @@ jobs: name: Release Image runs-on: ubuntu-latest steps: + # this step is needed because the chatbot submodule is private + - name: Set up deploy keys for submodules + uses: webfactory/ssh-agent@v0.9.1 + with: + ssh-private-key: ${{ secrets.CHATBOT_SUBMODULE_DEPLOY_KEY }} - name: Checkout uses: actions/checkout@v4 with: From 97dbcf3d173bbc267cdb873d29f0469aeff64e2c Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 23 May 2025 09:38:04 -0300 Subject: [PATCH 074/181] we don't need to checkout submodules in this action because it only watches for new chart versions --- .github/workflows/release-chart.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/release-chart.yaml b/.github/workflows/release-chart.yaml index 3cbf6a57..000626c5 100644 --- a/.github/workflows/release-chart.yaml +++ b/.github/workflows/release-chart.yaml @@ -10,8 +10,6 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - with: - submodules: recursive - name: Configure Git run: | git config user.name "$GITHUB_ACTOR" From 77e9d14bf84322b3f7540ebe8608ed962e381eb5 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 23 May 2025 10:31:31 -0300 Subject: [PATCH 075/181] building `db_url` on runtime --- .env.docker | 1 - backend/apps/chatbot/views.py | 8 +++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.env.docker b/.env.docker index 547cd6e3..d945c5c9 100644 --- a/.env.docker +++ b/.env.docker @@ -33,7 +33,6 @@ QUERY_PROJECT_ID= MODEL_URI= LANGCHAIN_TRACING_V2= LANGCHAIN_API_KEY= -DB_URL= CHROMA_HOST= CHROMA_PORT= SQL_CHROMA_COLLECTION= diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index 83ca790f..c20aa271 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -31,7 +31,13 @@ def _get_feedback_sender() -> LangSmithFeedbackSender: @cache def _get_sql_assistant() -> SQLAssistant: - db_url = os.environ["DB_URL"] + db_host = os.environ["DB_HOST"] + db_port = os.environ["DB_PORT"] + db_name = os.environ["DB_NAME"] + db_user = os.environ["DB_USER"] + db_password = os.environ["DB_PASSWORD"] + + db_url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}" bq_billing_project = os.environ["BILLING_PROJECT_ID"] bq_query_project = os.environ["QUERY_PROJECT_ID"] From c04af7ba1db32f9380f90e91a83ebd709f3eb1d1 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 23 May 2025 11:36:56 -0300 Subject: [PATCH 076/181] add non-sensitive env variables --- .github/workflows/deploy-dev.yaml | 5 +++++ .github/workflows/deploy-prod.yaml | 5 +++++ .github/workflows/deploy-staging.yaml | 5 +++++ charts/basedosdados-api/templates/deployment.yaml | 8 ++++++++ 4 files changed, 23 insertions(+) diff --git 
a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index caa6d906..c38b3bc0 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -61,6 +61,11 @@ jobs: envFrom: - secretRef: name: api-development-secrets + chatbot: + billing_project_id: basedosdados-dev + query_project_id: basedosdados-dev + model_uri: google/gemini-2.0-flash + langchain_tracing_v2: true settingsModule: "backend.settings.remote" database: host: "cloud-sql-proxy" diff --git a/.github/workflows/deploy-prod.yaml b/.github/workflows/deploy-prod.yaml index 23489779..f0726f91 100644 --- a/.github/workflows/deploy-prod.yaml +++ b/.github/workflows/deploy-prod.yaml @@ -61,6 +61,11 @@ jobs: envFrom: - secretRef: name: api-prod-secrets + chatbot: + billing_project_id: basedosdados + query_project_id: basedosdados + model_uri: google/gemini-2.0-flash + langchain_tracing_v2: true settingsModule: "backend.settings.remote" database: host: "cloud-sql-proxy" diff --git a/.github/workflows/deploy-staging.yaml b/.github/workflows/deploy-staging.yaml index bfc968bb..08292767 100644 --- a/.github/workflows/deploy-staging.yaml +++ b/.github/workflows/deploy-staging.yaml @@ -61,6 +61,11 @@ jobs: envFrom: - secretRef: name: api-staging-secrets + chatbot: + billing_project_id: basedosdados-staging + query_project_id: basedosdados-staging + model_uri: google/gemini-2.0-flash + langchain_tracing_v2: true settingsModule: "backend.settings.remote" database: host: "cloud-sql-proxy" diff --git a/charts/basedosdados-api/templates/deployment.yaml b/charts/basedosdados-api/templates/deployment.yaml index f40c079f..c9d3cf4a 100644 --- a/charts/basedosdados-api/templates/deployment.yaml +++ b/charts/basedosdados-api/templates/deployment.yaml @@ -52,6 +52,14 @@ spec: secretKeyRef: name: {{ .Values.database.passwordSecret | quote }} key: password + - name: BILLING_PROJECT_ID + value: {{ .Values.api.chatbot.billing_project_id | quote }} + - name: QUERY_PROJECT_ID + value: {{ .Values.api.chatbot.query_project_id | quote }} + - name: MODEL_URI + value: {{ .Values.api.chatbot.model_uri | quote }} + - name: LANGCHAIN_TRACING_V2 + value: {{ .Values.api.chatbot.langchain_tracing_v2 | quote }} - name: DJANGO_SETTINGS_MODULE value: {{ .Values.api.settingsModule | quote }} {{- with .Values.api.env }} From f86531097824aca1f6078485b0aded2ca37eb52a Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Tue, 27 May 2025 12:39:03 +1000 Subject: [PATCH 077/181] fix: rename migration to be consistent with newer one --- ...e_key_dictionarykey_alter_dictionarykey_options_and_more.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename backend/apps/api/v1/migrations/{0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py => 0056_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py} (95%) diff --git a/backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py b/backend/apps/api/v1/migrations/0056_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py similarity index 95% rename from backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py rename to backend/apps/api/v1/migrations/0056_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py index e0c7c7dd..2d359753 100644 --- a/backend/apps/api/v1/migrations/0055_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py +++ b/backend/apps/api/v1/migrations/0056_rename_key_dictionarykey_alter_dictionarykey_options_and_more.py @@ -6,7 
+6,7 @@ class Migration(migrations.Migration): dependencies = [ - ("v1", "0054_alter_organization_area"), + ("v1", "0055_alter_type_fields_many_tables"), ] operations = [ From 2111338efb4bd96df752f0512deb6362da6ed5db Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Tue, 27 May 2025 14:40:24 +1000 Subject: [PATCH 078/181] fix: is_required, limit choices --- backend/apps/data_api/graphql.py | 4 ++-- backend/apps/data_api/migrations/0001_initial.py | 4 ---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/backend/apps/data_api/graphql.py b/backend/apps/data_api/graphql.py index c9908de0..6e525240 100644 --- a/backend/apps/data_api/graphql.py +++ b/backend/apps/data_api/graphql.py @@ -63,8 +63,8 @@ class Meta: filter_fields = { "id": ["exact"], "name": ["exact", "icontains"], - "required": ["exact"], - "type": ["exact"], + "is_required": ["exact"], + "type__name": ["exact"], } interfaces = (PlainTextNode,) connection_class = CountableConnection diff --git a/backend/apps/data_api/migrations/0001_initial.py b/backend/apps/data_api/migrations/0001_initial.py index ae5e53ea..996e02e2 100644 --- a/backend/apps/data_api/migrations/0001_initial.py +++ b/backend/apps/data_api/migrations/0001_initial.py @@ -8,8 +8,6 @@ from django.conf import settings from django.db import migrations, models -import backend.apps.data_api.models - class Migration(migrations.Migration): initial = True @@ -215,7 +213,6 @@ class Migration(migrations.Migration): "column", models.ForeignKey( blank=True, - limit_choices_to=backend.apps.data_api.models.limit_column_choices, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="parameters", @@ -296,7 +293,6 @@ class Migration(migrations.Migration): name="table", field=models.ForeignKey( blank=True, - limit_choices_to=backend.apps.data_api.models.limit_table_choices, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="endpoints", From 4e3adb6b03c04039def281f215ab8a2f60f25495 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Tue, 27 May 2025 17:37:28 +1000 Subject: [PATCH 079/181] fix: admin show bool endpoint table --- backend/apps/api/v1/admin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py index ad781c50..7a4deaf8 100644 --- a/backend/apps/api/v1/admin.py +++ b/backend/apps/api/v1/admin.py @@ -12,9 +12,8 @@ from django.shortcuts import render from django.urls import reverse from django.utils.html import format_html -from django_admin_inline_paginator_plus.admin import StackedInlinePaginated from django.utils.safestring import mark_safe - +from django_admin_inline_paginator_plus.admin import StackedInlinePaginated from modeltranslation.admin import TabbedTranslationAdmin, TranslationStackedInline from ordered_model.admin import OrderedInlineModelAdminMixin, OrderedStackedInline @@ -698,6 +697,7 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin): "published_by", "data_cleaned_by", "auxiliary_files_url", + "is_data_api_endpoint", "created_at", "updated_at", ) From 021d0807fd7f2efd028a3024d4159c50520d9f96 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Tue, 27 May 2025 23:20:12 +1000 Subject: [PATCH 080/181] feat: change credits with hashed API key --- backend/apps/data_api/admin.py | 5 ++- backend/apps/data_api/views.py | 76 +++++++++++++++++++++++++++------- 2 files changed, 64 insertions(+), 17 deletions(-) diff --git a/backend/apps/data_api/admin.py b/backend/apps/data_api/admin.py index d5c78fc9..02c969f0 100644 --- 
a/backend/apps/data_api/admin.py +++ b/backend/apps/data_api/admin.py @@ -20,6 +20,7 @@ class KeyInline(admin.TabularInline): readonly_fields = ( "id", "name", + "hash", "prefix", "is_active", "expires_at", @@ -115,6 +116,7 @@ def pricing_actions(self, obj): class KeyAdmin(admin.ModelAdmin): list_display = ( "name", + "hash", "account", "prefix", "balance", @@ -124,13 +126,14 @@ class KeyAdmin(admin.ModelAdmin): ) list_filter = ("is_active",) search_fields = ("name", "prefix", "account__email", "account__full_name") - readonly_fields = ("id", "prefix", "hash", "balance", "created_at", "updated_at") + readonly_fields = ("id", "hash", "prefix", "balance", "created_at", "updated_at") fieldsets = ( ( None, { "fields": ( "name", + "hash", "account", "prefix", "balance", diff --git a/backend/apps/data_api/views.py b/backend/apps/data_api/views.py index f8d26d72..1b94cf03 100644 --- a/backend/apps/data_api/views.py +++ b/backend/apps/data_api/views.py @@ -105,13 +105,30 @@ def get(self, request): @method_decorator(csrf_exempt, name="dispatch") class DataAPICreditAddView(View): - # TODO: remove GET method when in production + def _get_hashed_key(self, key=None, hashed_key=None): + if not key and not hashed_key: + return None, "Either key or hashed_key must be provided" + + if key and hashed_key: + # If both are provided, validate they match + computed_hash = sha256(key.encode()).hexdigest() + if computed_hash != hashed_key: + return None, "Provided key and hashed_key do not match" + return hashed_key, None + + if hashed_key: + return hashed_key, None + + # If only key is provided, hash it + return sha256(key.encode()).hexdigest(), None + def get(self, request): key = request.GET.get("key") + hashed_key = request.GET.get("hashed_key") amount = request.GET.get("amount") currency = request.GET.get("currency") - if not all([key, amount, currency]): + if not all([amount, currency]): return JsonResponse( {"error": "Missing required parameters", "success": False}, status=400 ) @@ -127,8 +144,10 @@ def get(self, request): except ValueError: return JsonResponse({"error": "Invalid amount format", "success": False}, status=400) - # Hash the API key - hashed_key = sha256(key.encode()).hexdigest() + # Get and validate hashed key + hashed_key, error = self._get_hashed_key(key, hashed_key) + if error: + return JsonResponse({"error": error, "success": False}, status=400) try: amount = Decimal(str(amount)) @@ -162,14 +181,17 @@ def post(self, request): metadata = payment_intent.metadata key = metadata.get("key") + hashed_key = metadata.get("hashed_key") amount = float(payment_intent.amount) / 100 # Convert from cents to BRL currency = payment_intent.currency.upper() - if not key: - raise ValueError("API key not found in payment metadata") + if not key and not hashed_key: + raise ValueError("Neither key nor hashed_key found in payment metadata") - # Hash the API key - hashed_key = sha256(key.encode()).hexdigest() + # Get and validate hashed key + hashed_key, error = self._get_hashed_key(key, hashed_key) + if error: + raise ValueError(error) try: amount = Decimal(str(amount)) @@ -196,13 +218,30 @@ def post(self, request): @method_decorator(csrf_exempt, name="dispatch") class DataAPICreditDeductView(View): - # TODO: remove GET method when in production + def _get_hashed_key(self, key=None, hashed_key=None): + if not key and not hashed_key: + return None, "Either key or hashed_key must be provided" + + if key and hashed_key: + # If both are provided, validate they match + computed_hash = 
sha256(key.encode()).hexdigest() +            if computed_hash != hashed_key: +                return None, "Provided key and hashed_key do not match" +            return hashed_key, None + +        if hashed_key: +            return hashed_key, None + +        # If only key is provided, hash it +        return sha256(key.encode()).hexdigest(), None +     def get(self, request):         key = request.GET.get("key") +        hashed_key = request.GET.get("hashed_key")         amount = request.GET.get("amount")         currency = request.GET.get("currency")  -        if not all([key, amount, currency]): +        if not all([amount, currency]):             return JsonResponse(                 {"error": "Missing required parameters", "success": False}, status=400             ) @@ -218,8 +257,10 @@ def get(self, request):         except ValueError:             return JsonResponse({"error": "Invalid amount format", "success": False}, status=400)  -        # Hash the API key -        hashed_key = sha256(key.encode()).hexdigest() +        # Get and validate hashed key +        hashed_key, error = self._get_hashed_key(key, hashed_key) +        if error: +            return JsonResponse({"error": error, "success": False}, status=400)          try:             amount = Decimal(str(amount)) @@ -254,14 +295,17 @@ def post(self, request):              metadata = payment_intent.metadata             key = metadata.get("key") +            hashed_key = metadata.get("hashed_key")             amount = float(payment_intent.amount) / 100  # Convert from cents to currency units             currency = payment_intent.currency.upper()  -            if not key: -                raise ValueError("API key not found in payment metadata") +            if not key and not hashed_key: +                raise ValueError("Neither key nor hashed_key found in payment metadata")  -            # Hash the API key -            hashed_key = sha256(key.encode()).hexdigest() +            # Get and validate hashed key +            hashed_key, error = self._get_hashed_key(key, hashed_key) +            if error: +                raise ValueError(error)              try:                 amount = Decimal(str(amount)) From 0cda833fb63bb88598e18ac11b3ea652fe8f7220 Mon Sep 17 00:00:00 2001 From: Luiz Eduardo Date: Tue, 27 May 2025 18:33:41 -0300 Subject: [PATCH 081/181] test: fix ruff linting issues in dev branch (#820) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix linting errors that were preventing the tests from running properly --- backend/apps/account/admin.py | 2 +- ..._career_end_at_alter_career_level_and_more.py | 3 ++- .../account/migrations/0020_career_role_new.py | 3 ++- .../0021_rename_role_career_role_old_and_more.py | 3 ++- backend/apps/account/translation.py | 2 +- backend/apps/account/views.py | 6 +++--- .../v1/migrations/0050_table_is_deprecated.py | 2 ++ backend/apps/api/v1/search_views.py | 1 + .../apps/core/management/commands/populate.py | 14 ++++++++------ scripts/ai-database-translate.py | 2 ++ scripts/database-clean-area-slug.py | 16 +++++++++------- scripts/debug/debug.ipynb | 1 + 12 files changed, 34 insertions(+), 21 deletions(-) diff --git a/backend/apps/account/admin.py b/backend/apps/account/admin.py index fb08210a..49e7f7e7 100644 --- a/backend/apps/account/admin.py +++ b/backend/apps/account/admin.py @@ -10,7 +10,7 @@ from django.utils.translation import gettext_lazy from faker import Faker -from backend.apps.account.models import Account, BDGroup, BDRole, Team, Role, Career, Subscription +from backend.apps.account.models import Account, BDGroup, BDRole, Career, Role, Subscription, Team from backend.apps.account.tasks import sync_subscription_task diff --git a/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py b/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py index 7e656d4e..385e059a 100644 --- 
a/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py +++ b/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py @@ -1,7 +1,8 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.18 on 2025-02-04 04:02 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): diff --git a/backend/apps/account/migrations/0020_career_role_new.py b/backend/apps/account/migrations/0020_career_role_new.py index 5a0c7269..5b666cae 100644 --- a/backend/apps/account/migrations/0020_career_role_new.py +++ b/backend/apps/account/migrations/0020_career_role_new.py @@ -1,7 +1,8 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.18 on 2025-02-04 04:04 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): diff --git a/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py b/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py index 3d091ece..814e6d60 100644 --- a/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py +++ b/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.18 on 2025-02-05 00:43 -from django.db import migrations, models +from django.db import migrations class Migration(migrations.Migration): diff --git a/backend/apps/account/translation.py b/backend/apps/account/translation.py index 627aae88..99666a57 100644 --- a/backend/apps/account/translation.py +++ b/backend/apps/account/translation.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from modeltranslation.translator import TranslationOptions, translator -from .models import Account, Team, Role +from .models import Account, Role, Team class TeamTranslationOptions(TranslationOptions): diff --git a/backend/apps/account/views.py b/backend/apps/account/views.py index 3c1952c4..334d9df0 100644 --- a/backend/apps/account/views.py +++ b/backend/apps/account/views.py @@ -33,14 +33,14 @@ def post(self, request, uidb64): try: uid = force_str(urlsafe_base64_decode(uidb64)) user = user_model.objects.get(id=uid) - logger.info(f'Send Activation Email - User: {user}') + logger.info(f"Send Activation Email - User: {user}") except (TypeError, ValueError, OverflowError, user_model.DoesNotExist) as e: - logger.error(f'Send Activation Email - Error: {e}') + logger.error(f"Send Activation Email - Error: {e}") user = None if user: send_activation_email(user) - logger.info('Send Activation Email - Sended email for activation') + logger.info("Send Activation Email - Sended email for activation") return JsonResponse({}, status=200) else: return JsonResponse({}, status=422) diff --git a/backend/apps/api/v1/migrations/0050_table_is_deprecated.py b/backend/apps/api/v1/migrations/0050_table_is_deprecated.py index f171d55f..37082d85 100644 --- a/backend/apps/api/v1/migrations/0050_table_is_deprecated.py +++ b/backend/apps/api/v1/migrations/0050_table_is_deprecated.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# ruff: noqa: E501 # Generated by Django 4.2.16 on 2024-11-06 04:30 from django.db import migrations, models diff --git a/backend/apps/api/v1/search_views.py b/backend/apps/api/v1/search_views.py index 92328b4a..f17a3274 100644 --- a/backend/apps/api/v1/search_views.py +++ b/backend/apps/api/v1/search_views.py @@ -1,4 +1,5 @@ # -*- 
coding: utf-8 -*- +# ruff: noqa: E501 from django.core.files.storage import default_storage as storage from django.http import JsonResponse diff --git a/backend/apps/core/management/commands/populate.py b/backend/apps/core/management/commands/populate.py index 7310fca4..69450768 100644 --- a/backend/apps/core/management/commands/populate.py +++ b/backend/apps/core/management/commands/populate.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +# ruff: noqa: E501 + import json import os @@ -172,9 +174,9 @@ def disable_constraints(self, items): with connection.cursor() as cursor: cursor.execute( """ - SELECT column_name - FROM information_schema.columns - WHERE table_name = %s + SELECT column_name + FROM information_schema.columns + WHERE table_name = %s AND is_nullable = 'NO' AND column_name != 'id' """, @@ -199,9 +201,9 @@ def enable_constraints(self, items): with connection.cursor() as cursor: cursor.execute( """ - SELECT column_name - FROM information_schema.columns - WHERE table_name = %s + SELECT column_name + FROM information_schema.columns + WHERE table_name = %s AND is_nullable = 'YES' """, [item], diff --git a/scripts/ai-database-translate.py b/scripts/ai-database-translate.py index d4507507..8a1c11c9 100755 --- a/scripts/ai-database-translate.py +++ b/scripts/ai-database-translate.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# ruff: noqa #!/usr/bin/env python3 """ AI Database Translation Script diff --git a/scripts/database-clean-area-slug.py b/scripts/database-clean-area-slug.py index e0ef6677..90b9e192 100644 --- a/scripts/database-clean-area-slug.py +++ b/scripts/database-clean-area-slug.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# ruff: noqa: E501 #!/usr/bin/env python3 """ Area Slug Update Script @@ -92,7 +94,7 @@ def main(): print("Setting parent reference for Brazilian states...") cursor.execute( """ - UPDATE area + UPDATE area SET parent_id = %s, entity_id = %s WHERE slug LIKE 'br_%%' @@ -106,9 +108,9 @@ def main(): # First, let's check what we're working with cursor.execute( """ - SELECT slug - FROM area - WHERE slug LIKE 'br\_%%\_%%' + SELECT slug + FROM area + WHERE slug LIKE 'br\_%%\_%%' LIMIT 5; """ ) @@ -116,9 +118,9 @@ def main(): cursor.execute( """ - SELECT slug - FROM area - WHERE slug LIKE 'br\_%%' + SELECT slug + FROM area + WHERE slug LIKE 'br\_%%' AND slug NOT LIKE 'br\_%%\_%%' LIMIT 5; """ diff --git a/scripts/debug/debug.ipynb b/scripts/debug/debug.ipynb index 69dcea14..1cf7294f 100644 --- a/scripts/debug/debug.ipynb +++ b/scripts/debug/debug.ipynb @@ -6,6 +6,7 @@ "metadata": {}, "outputs": [], "source": [ + "# ruff: noqa\n", "import os\n", "import sys\n", "\n", From 6008cb87f63cec2243198a899dcffec356cbe4e5 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Thu, 29 May 2025 16:05:23 +1000 Subject: [PATCH 082/181] apps.py so chatbot shows in admin --- backend/apps/chatbot/apps.py | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 backend/apps/chatbot/apps.py diff --git a/backend/apps/chatbot/apps.py b/backend/apps/chatbot/apps.py new file mode 100644 index 00000000..9f10e68d --- /dev/null +++ b/backend/apps/chatbot/apps.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +from django.apps import AppConfig + + +class ChatbotConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "backend.apps.chatbot" + verbose_name = "Chatbot" From 83eb90a219e15232189d2fbb440cf081422ed4ee Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Thu, 29 May 2025 17:28:29 +1000 Subject: [PATCH 083/181] Re-trigger workflows From 
8d2a6b77f26807e8ad69d6bc90fb91402d1c0795 Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Thu, 29 May 2025 22:12:30 +0930 Subject: [PATCH 084/181] Empty commit From ca87828faca3e17deee407255983c7c6ddcf310b Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Thu, 29 May 2025 17:37:24 -0300 Subject: [PATCH 085/181] remove chatbot related env variables --- .github/workflows/deploy-dev.yaml | 7 +------ .github/workflows/deploy-prod.yaml | 7 +------ .github/workflows/deploy-staging.yaml | 7 +------ charts/basedosdados-api/templates/deployment.yaml | 8 -------- 4 files changed, 3 insertions(+), 26 deletions(-) diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index c38b3bc0..ac3247af 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -8,7 +8,7 @@ on: jobs: deploy-dev: runs-on: ubuntu-latest - if: ${{ github.event_name == 'workflow_dispatch' }} || ${{ github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }} environment: name: development url: https://api.development.basedosdados.org @@ -61,11 +61,6 @@ jobs: envFrom: - secretRef: name: api-development-secrets - chatbot: - billing_project_id: basedosdados-dev - query_project_id: basedosdados-dev - model_uri: google/gemini-2.0-flash - langchain_tracing_v2: true settingsModule: "backend.settings.remote" database: host: "cloud-sql-proxy" diff --git a/.github/workflows/deploy-prod.yaml b/.github/workflows/deploy-prod.yaml index f0726f91..f442eaa5 100644 --- a/.github/workflows/deploy-prod.yaml +++ b/.github/workflows/deploy-prod.yaml @@ -8,7 +8,7 @@ on: jobs: deploy-prod: runs-on: ubuntu-latest - if: ${{ github.event_name == 'workflow_dispatch' }} || ${{ github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }} environment: name: production url: https://backend.basedosdados.org @@ -61,11 +61,6 @@ jobs: envFrom: - secretRef: name: api-prod-secrets - chatbot: - billing_project_id: basedosdados - query_project_id: basedosdados - model_uri: google/gemini-2.0-flash - langchain_tracing_v2: true settingsModule: "backend.settings.remote" database: host: "cloud-sql-proxy" diff --git a/.github/workflows/deploy-staging.yaml b/.github/workflows/deploy-staging.yaml index 08292767..d49a7881 100644 --- a/.github/workflows/deploy-staging.yaml +++ b/.github/workflows/deploy-staging.yaml @@ -8,7 +8,7 @@ on: jobs: deploy-staging: runs-on: ubuntu-latest - if: ${{ github.event_name == 'workflow_dispatch' }} || ${{ github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }} environment: name: staging url: https://staging.backend.basedosdados.org @@ -61,11 +61,6 @@ jobs: envFrom: - secretRef: name: api-staging-secrets - chatbot: - billing_project_id: basedosdados-staging - query_project_id: basedosdados-staging - model_uri: google/gemini-2.0-flash - langchain_tracing_v2: true settingsModule: "backend.settings.remote" database: host: "cloud-sql-proxy" diff --git a/charts/basedosdados-api/templates/deployment.yaml b/charts/basedosdados-api/templates/deployment.yaml index c9d3cf4a..f40c079f 100644 --- a/charts/basedosdados-api/templates/deployment.yaml +++ b/charts/basedosdados-api/templates/deployment.yaml @@ -52,14 +52,6 @@ spec: secretKeyRef: name: {{ .Values.database.passwordSecret | quote }} key: 
password - - name: BILLING_PROJECT_ID - value: {{ .Values.api.chatbot.billing_project_id | quote }} - - name: QUERY_PROJECT_ID - value: {{ .Values.api.chatbot.query_project_id | quote }} - - name: MODEL_URI - value: {{ .Values.api.chatbot.model_uri | quote }} - - name: LANGCHAIN_TRACING_V2 - value: {{ .Values.api.chatbot.langchain_tracing_v2 | quote }} - name: DJANGO_SETTINGS_MODULE value: {{ .Values.api.settingsModule | quote }} {{- with .Values.api.env }} From ab8c168dbff2d9c6d1ef1d40e0e2512e27fabfa2 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 30 May 2025 15:26:16 -0300 Subject: [PATCH 086/181] update `chatbot` package version to `0.4.4` --- chatbot | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chatbot b/chatbot index 06e51366..fc126982 160000 --- a/chatbot +++ b/chatbot @@ -1 +1 @@ -Subproject commit 06e51366853450f4366c7bab40f7172166b255b9 +Subproject commit fc1269826229e4daad5c6cc7678ab55dc4739c08 From e01d38355c475bcf5f09381022d70962c3d68571 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 30 May 2025 15:26:44 -0300 Subject: [PATCH 087/181] enable `chatbot` package logging --- backend/apps/chatbot/__init__.py | 0 backend/apps/chatbot/apps.py | 6 ++++++ 2 files changed, 6 insertions(+) create mode 100644 backend/apps/chatbot/__init__.py diff --git a/backend/apps/chatbot/__init__.py b/backend/apps/chatbot/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/apps/chatbot/apps.py b/backend/apps/chatbot/apps.py index 9f10e68d..a3bb5799 100644 --- a/backend/apps/chatbot/apps.py +++ b/backend/apps/chatbot/apps.py @@ -1,8 +1,14 @@ # -*- coding: utf-8 -*- from django.apps import AppConfig +from loguru import logger class ChatbotConfig(AppConfig): default_auto_field = "django.db.models.BigAutoField" name = "backend.apps.chatbot" verbose_name = "Chatbot" + + def ready(self): + # Enable logs from the chatbot package + import chatbot + logger.enable(chatbot.__name__) From cda364ea80545aca052cdf5a16a4b45b42e2f681 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 30 May 2025 15:35:49 -0300 Subject: [PATCH 088/181] update poetry lock file --- poetry.lock | 544 +++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 406 insertions(+), 138 deletions(-) diff --git a/poetry.lock b/poetry.lock index 23019c6c..f0992390 100644 --- a/poetry.lock +++ b/poetry.lock @@ -158,6 +158,65 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "bottleneck" +version = "1.5.0" +description = "Fast NumPy array functions written in C" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "bottleneck-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7962177b04b865b17e883ace01c68cf50353ef6a9437ec01bad1f5a1a2708490"}, + {file = "bottleneck-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8892f2d90d63a3dd5884e8f3fe7bbe8c569851a984023340ef926d2205332d96"}, + {file = "bottleneck-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1648f2a0d52b78f6e530385862e279ffa66baae2ce038bfdf5d8b29a638bac46"}, + {file = "bottleneck-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f13b644207118564b95eb7b2130555fb4a4b2266a739b2a8f98a5276baa723ea"}, + {file = "bottleneck-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9ca39aca62f0e827fc8c9b352352224ecb38a98d8f9cbc30f071672c31904aa2"}, + {file = "bottleneck-1.5.0-cp310-cp310-win32.whl", hash = 
"sha256:f9545206daaffaecf88d176f657b7c939f6d909275991121dc8dee936dcd8985"}, + {file = "bottleneck-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:c15a5f009ea72f95d0a35e784c6944af2b6d7dab102341fb3c3412e41ce5adf6"}, + {file = "bottleneck-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9be5dfdf1a662d1d4423d7b7e8dd9a1b7046dcc2ce67b6e94a31d1cc57a8558f"}, + {file = "bottleneck-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16fead35c0b5d307815997eef67d03c2151f255ca889e0fc3d68703f41aa5302"}, + {file = "bottleneck-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:049162927cf802208cc8691fb99b108afe74656cdc96b9e2067cf56cb9d84056"}, + {file = "bottleneck-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2f5e863a4fdaf9c85416789aeb333d1cdd3603037fd854ad58b0e2ac73be16cf"}, + {file = "bottleneck-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8d123762f78717fc35ecf10cad45d08273fcb12ab40b3c847190b83fec236f03"}, + {file = "bottleneck-1.5.0-cp311-cp311-win32.whl", hash = "sha256:07c2c1aa39917b5c9be77e85791aa598e8b2c00f8597a198b93628bbfde72a3f"}, + {file = "bottleneck-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:80ef9eea2a92fc5a1c04734aa1bcf317253241062c962eaa6e7f123b583d0109"}, + {file = "bottleneck-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbb0f0d38feda63050aa253cf9435e81a0ecfac954b0df84896636be9eabd9b6"}, + {file = "bottleneck-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:613165ce39bf6bd80f5307da0f05842ba534b213a89526f1eba82ea0099592fc"}, + {file = "bottleneck-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f218e4dae6511180dcc4f06d8300e0c81e7f3df382091f464c5a919d289fab8e"}, + {file = "bottleneck-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3886799cceb271eb67d057f6ecb13fb4582bda17a3b13b4fa0334638c59637c6"}, + {file = "bottleneck-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dc8d553d4bf033d3e025cd32d4c034d2daf10709e31ced3909811d1c843e451c"}, + {file = "bottleneck-1.5.0-cp312-cp312-win32.whl", hash = "sha256:0dca825048a3076f34c4a35409e3277b31ceeb3cbb117bbe2a13ff5c214bcabc"}, + {file = "bottleneck-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:f26005740e6ef6013eba8a48241606a963e862a601671eab064b7835cd12ef3d"}, + {file = "bottleneck-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97285cfedf3545d9a010b2db2123f9750bf920081e29364cc465052973bd0b5a"}, + {file = "bottleneck-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1043d95674566063f638582cc8700c24c4427f532f86b9e7cfc9f9ec84abc1ff"}, + {file = "bottleneck-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc6a24a41f55765215005cec97dd69f41ac747ed0f4d446caa508531957eeda"}, + {file = "bottleneck-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7967e0189defe9f49025bd6469ff0fe22af5463926af55c7ba1e4592051d8ef8"}, + {file = "bottleneck-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:27e38e829497ca0a5eebdb79d3293aaa424f3c31c13806e5c607fd414536b7c3"}, + {file = "bottleneck-1.5.0-cp313-cp313-win32.whl", hash = "sha256:1214a2bf3b36c66e3898aab821ad8366a3062db6f83a8f083e2f799d202e86ea"}, + {file = "bottleneck-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:436a402f0d60a9d6541d7adb0929501225a151ad03b96b756e0b607db6a106f1"}, + {file = 
"bottleneck-1.5.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c0b661005b059fcb09988f8b5e2cd5e9c702e1bed24819ed38f85145140b5"}, + {file = "bottleneck-1.5.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48c2657102f3288e178cc341f000475a32f49a3cd8b7067e091d5446fa899383"}, + {file = "bottleneck-1.5.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c4c94cfcba46adfe71894c63c4b186c847965e73727dbaf5fd9ade41ef38e6e"}, + {file = "bottleneck-1.5.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f3e308416886e29441a0b71bce8f3eb4c7a4943be541fd918244aaf25534d36"}, + {file = "bottleneck-1.5.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bda7c475d4a7e271dbd0b1d4bbce29065edc8891361857105b7212fe383c9a36"}, + {file = "bottleneck-1.5.0-cp313-cp313t-win32.whl", hash = "sha256:a107ed8b5f998918c24a1e476dbd2dfc3514ab0082df7132c460b01e6ffd8cf4"}, + {file = "bottleneck-1.5.0-cp313-cp313t-win_amd64.whl", hash = "sha256:816c910c5d1fb53adb32581c52a513b206f503ae253ace70cb32d1fe4e45af1d"}, + {file = "bottleneck-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8ef667f3a8602c2d48f0c5dcbe5a018b3c5b978cfc0bb9d0af59797ea1f5d48b"}, + {file = "bottleneck-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b442e53bc80ab106e9ed1bf5e0b443a205dabbf18d62147432df92c315aa22e"}, + {file = "bottleneck-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3a84c1067667963e469878f3d8fc8af4aeafd3b46f53deb22e00cc9c7da40a6"}, + {file = "bottleneck-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21a72f5b203ceffc56b73111b47ae63b86278c73c45d53baaae3612b2774fb84"}, + {file = "bottleneck-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c90adb81d9ea0a7834426c2f2a3071f970f9fc562036a434d697c8265c4171db"}, + {file = "bottleneck-1.5.0-cp39-cp39-win32.whl", hash = "sha256:d5cd43f4fd0a353b1cd135031190fb67c95f820a5a99501a7c5524fef2191555"}, + {file = "bottleneck-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:6dca60131912adc87ba6c8f5d3bb9e6f5f9e1bf4ec2193ecb4ab6d0001d4bbc0"}, + {file = "bottleneck-1.5.0.tar.gz", hash = "sha256:c860242cf20e69d5aab2ec3c5d6c8c2a15f19e4b25b28b8fca2c2a12cefae9d8"}, +] + +[package.dependencies] +numpy = "*" + +[package.extras] +doc = ["gitpython", "numpydoc", "sphinx"] + [[package]] name = "build" version = "1.2.2.post1" @@ -403,27 +462,27 @@ files = [ [[package]] name = "chatbot" -version = "0.4.2" +version = "0.4.4" description = "" optional = false -python-versions = "^3.10" +python-versions = ">=3.10,<4.0" groups = ["main"] files = [] develop = false [package.dependencies] -google-cloud-bigquery = "^3.25.0" +google-cloud-bigquery = ">=3.33.0,<4.0.0" grpcio = "1.71.0" -langchain = "0.3.20" +langchain = "0.3.25" langchain-chroma = "0.2.2" -langchain-google-vertexai = "2.0.15" -langchain-openai = "0.3.9" -langgraph = "0.3.11" -langgraph-checkpoint-postgres = "2.0.17" -loguru = "^0.7.2" -psycopg = "3.2.6" +langchain-google-vertexai = "2.0.24" +langchain-openai = "0.3.18" +langgraph = "0.4.7" +langgraph-checkpoint-postgres = "2.0.21" +loguru = ">=0.7.3,<0.8.0" +psycopg = "3.2.9" psycopg-pool = "3.2.6" -sqlparse = "^0.5.1" +sqlparse = ">=0.5.3,<0.6.0" [package.source] type = "directory" @@ -1138,7 +1197,7 @@ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= 
\"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, @@ -1288,35 +1347,36 @@ xai = ["tensorflow (>=2.3.0,<3.0.0)"] [[package]] name = "google-cloud-bigquery" -version = "3.30.0" +version = "3.34.0" description = "Google BigQuery API client library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, - {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, + {file = "google_cloud_bigquery-3.34.0-py3-none-any.whl", hash = "sha256:de20ded0680f8136d92ff5256270b5920dfe4fae479f5d0f73e90e5df30b1cf7"}, + {file = "google_cloud_bigquery-3.34.0.tar.gz", hash = "sha256:5ee1a78ba5c2ccb9f9a8b2bf3ed76b378ea68f49b6cac0544dc55cc97ff7c1ce"}, ] [package.dependencies] -google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=2.4.1,<3.0.0dev" -google-resumable-media = ">=2.0.0,<3.0dev" -packaging = ">=20.0.0" -python-dateutil = ">=2.7.3,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" +google-api-core = {version = ">=2.11.1,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0" +google-cloud-core = ">=2.4.1,<3.0.0" +google-resumable-media = ">=2.0.0,<3.0.0" +packaging = ">=24.2.0" +python-dateutil = ">=2.8.2,<3.0.0" +requests = ">=2.21.0,<3.0.0" [package.extras] -all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,opentelemetry,pandas,tqdm]"] -bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"] -ipython = ["bigquery-magics (>=0.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] +all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,matplotlib,opentelemetry,pandas,tqdm]"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.18.0,<3.0.0)", "grpcio (>=1.47.0,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "pyarrow (>=4.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0)", "geopandas (>=0.9.0,<2.0.0)"] +ipython = ["bigquery-magics (>=0.6.0)", "ipython (>=7.23.1)"] +ipywidgets = ["ipykernel (>=6.2.0)", "ipywidgets (>=7.7.1)"] +matplotlib = ["matplotlib (>=3.10.3) ; python_version >= \"3.10\"", "matplotlib (>=3.7.1,<=3.9.2) ; python_version == \"3.9\""] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1) ; python_version >= \"3.8\"", 
"pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] +pandas = ["db-dtypes (>=1.0.4,<2.0.0)", "grpcio (>=1.47.0,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "pandas (>=1.3.0)", "pandas-gbq (>=0.26.1)", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.23.4,<5.0.0)"] [[package]] name = "google-cloud-bigquery-storage" @@ -1334,7 +1394,7 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = [ {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev"}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1702,6 +1762,7 @@ files = [ {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659"}, {file = "greenlet-3.2.2-cp39-cp39-win32.whl", hash = "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61"}, {file = "greenlet-3.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834"}, + {file = "greenlet-3.2.2.tar.gz", hash = "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485"}, ] [package.extras] @@ -2280,20 +2341,20 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "langchain" -version = "0.3.20" +version = "0.3.25" description = "Building applications with LLMs through composability" optional = false -python-versions = "<4.0,>=3.9" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "langchain-0.3.20-py3-none-any.whl", hash = "sha256:273287f8e61ffdf7e811cf8799e6a71e9381325b8625fd6618900faba79cfdd0"}, - {file = "langchain-0.3.20.tar.gz", hash = "sha256:edcc3241703e1f6557ef5a5c35cd56f9ccc25ff12e38b4829c66d94971737a93"}, + {file = "langchain-0.3.25-py3-none-any.whl", hash = "sha256:931f7d2d1eaf182f9f41c5e3272859cfe7f94fc1f7cef6b3e5a46024b4884c21"}, + {file = "langchain-0.3.25.tar.gz", hash = "sha256:a1d72aa39546a23db08492d7228464af35c9ee83379945535ceef877340d2a3a"}, ] [package.dependencies] async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -langchain-core = ">=0.3.41,<1.0.0" -langchain-text-splitters = ">=0.3.6,<1.0.0" +langchain-core = ">=0.3.58,<1.0.0" +langchain-text-splitters = ">=0.3.8,<1.0.0" langsmith = ">=0.1.17,<0.4" pydantic = ">=2.7.4,<3.0.0" PyYAML = ">=5.3" @@ -2303,6 +2364,7 @@ SQLAlchemy = ">=1.4,<3" [package.extras] anthropic = ["langchain-anthropic"] aws = ["langchain-aws"] +azure-ai = ["langchain-azure-ai"] cohere = ["langchain-cohere"] community = ["langchain-community"] deepseek = ["langchain-deepseek"] @@ -2314,6 +2376,7 @@ huggingface = ["langchain-huggingface"] mistralai = ["langchain-mistralai"] ollama = ["langchain-ollama"] openai = ["langchain-openai"] +perplexity = ["langchain-perplexity"] together = ["langchain-together"] xai = ["langchain-xai"] @@ -2339,47 +2402,48 @@ numpy = [ [[package]] name = "langchain-core" -version = "0.3.59" +version = "0.3.63" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "langchain_core-0.3.59-py3-none-any.whl", hash = "sha256:9686baaff43f2c8175535da13faf40e6866769015e93130c3c1e4243e7244d70"}, - {file = "langchain_core-0.3.59.tar.gz", hash = 
"sha256:052a37cf298c505144f007e5aeede6ecff2dc92c827525d1ef59101eb3a4551c"}, + {file = "langchain_core-0.3.63-py3-none-any.whl", hash = "sha256:f91db8221b1bc6808f70b2e72fded1a94d50ee3f1dff1636fb5a5a514c64b7f5"}, + {file = "langchain_core-0.3.63.tar.gz", hash = "sha256:e2e30cfbb7684a5a0319f6cbf065fc3c438bfd1060302f085a122527890fb01e"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.125,<0.4" +langsmith = ">=0.1.126,<0.4" packaging = ">=23.2,<25" -pydantic = [ - {version = ">=2.5.2,<3.0.0", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] +pydantic = ">=2.7.4" PyYAML = ">=5.3" tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" typing-extensions = ">=4.7" [[package]] name = "langchain-google-vertexai" -version = "2.0.15" +version = "2.0.24" description = "An integration package connecting Google VertexAI and LangChain" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "langchain_google_vertexai-2.0.15-py3-none-any.whl", hash = "sha256:994d6ab6430ce4d13541f4ca363c1f26b5ec53ad7fef29a0e72f98d3335eb64f"}, - {file = "langchain_google_vertexai-2.0.15.tar.gz", hash = "sha256:92ccdb02bab323be6f11a3dfd68de07b0a3160e0019f114d954ce648fab84e98"}, + {file = "langchain_google_vertexai-2.0.24-py3-none-any.whl", hash = "sha256:1fd15f15cf98430cc8967745d1990deafcf73da67427ce180cba601320f00568"}, + {file = "langchain_google_vertexai-2.0.24.tar.gz", hash = "sha256:0bf8d20260193796d51ec9581f85c6672bc0f6b240bebb2cd006046c16f57809"}, ] [package.dependencies] -google-cloud-aiplatform = ">=1.81.0,<2.0.0" +bottleneck = ">=1.4.2,<2.0.0" +google-cloud-aiplatform = ">=1.92.0,<2.0.0" google-cloud-storage = ">=2.18.0,<3.0.0" httpx = ">=0.28.0,<0.29.0" httpx-sse = ">=0.4.0,<0.5.0" -langchain-core = ">=0.3.31,<0.4" +langchain-core = ">=0.3.55,<0.4" +numexpr = ">=2.8.6,<3.0.0" +pyarrow = ">=19.0.1,<20.0.0" pydantic = ">=2.9,<3.0" +validators = ">=0.22.0,<1" [package.extras] anthropic = ["anthropic[vertexai] (>=0.35.0,<1)"] @@ -2387,19 +2451,19 @@ mistral = ["langchain-mistralai (>=0.2.0,<1)"] [[package]] name = "langchain-openai" -version = "0.3.9" +version = "0.3.18" description = "An integration package connecting OpenAI and LangChain" optional = false -python-versions = "<4.0,>=3.9" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "langchain_openai-0.3.9-py3-none-any.whl", hash = "sha256:1ad95c09a620910c39a8eb826eb146bd96bfbc55e4fca78b1e28ffd5e4f5b261"}, - {file = "langchain_openai-0.3.9.tar.gz", hash = "sha256:a2897d15765a435eff3fed7043235c25ec1e192e6c45a81e9e4fae2951335fb3"}, + {file = "langchain_openai-0.3.18-py3-none-any.whl", hash = "sha256:1687b972a6f6ac125cb8b23c0043278ab3bce031983ef9b32c1277155f88a03e"}, + {file = "langchain_openai-0.3.18.tar.gz", hash = "sha256:8e0769e4042de099a6217bbdccf7cc06b14c462e900424cbfc340c5f46f079ba"}, ] [package.dependencies] -langchain-core = ">=0.3.45,<1.0.0" -openai = ">=1.66.3,<2.0.0" +langchain-core = ">=0.3.61,<1.0.0" +openai = ">=1.68.2,<2.0.0" tiktoken = ">=0.7,<1" [[package]] @@ -2419,71 +2483,73 @@ langchain-core = ">=0.3.51,<1.0.0" [[package]] name = "langgraph" -version = "0.3.11" +version = "0.4.7" description = "Building stateful, multi-actor applications with LLMs" optional = false -python-versions = "<4.0,>=3.9.0" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "langgraph-0.3.11-py3-none-any.whl", hash = "sha256:b7b5b8a07ed37649d6b928b46462a06911b5bc89b5ee329d53a41ad2f33d56a8"}, - {file = 
"langgraph-0.3.11.tar.gz", hash = "sha256:bcbc78e7b4b584c858f42362734d2190802a7c75966faf1f603057f296f358bc"}, + {file = "langgraph-0.4.7-py3-none-any.whl", hash = "sha256:a925a3881fcd631eccf076994f41012e9320cd1adacc9aeb89ffcb3442b61f86"}, + {file = "langgraph-0.4.7.tar.gz", hash = "sha256:8948a35f6f85805c8ac36e94d5492c86a34c39dcf6f405b0f84491bc444e3479"}, ] [package.dependencies] -langchain-core = ">=0.1,<0.4" -langgraph-checkpoint = ">=2.0.10,<3.0.0" -langgraph-prebuilt = ">=0.1.1,<0.2" -langgraph-sdk = ">=0.1.42,<0.2.0" +langchain-core = ">=0.1" +langgraph-checkpoint = ">=2.0.26" +langgraph-prebuilt = ">=0.2.0" +langgraph-sdk = ">=0.1.42" +pydantic = ">=2.7.4" +xxhash = ">=3.5.0" [[package]] name = "langgraph-checkpoint" -version = "2.0.25" +version = "2.0.26" description = "Library with base interfaces for LangGraph checkpoint savers." optional = false -python-versions = "<4.0.0,>=3.9.0" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "langgraph_checkpoint-2.0.25-py3-none-any.whl", hash = "sha256:23416a0f5bc9dd712ac10918fc13e8c9c4530c419d2985a441df71a38fc81602"}, - {file = "langgraph_checkpoint-2.0.25.tar.gz", hash = "sha256:77a63cab7b5f84dec1d49db561326ec28bdd48bcefb7fe4ac372069d2609287b"}, + {file = "langgraph_checkpoint-2.0.26-py3-none-any.whl", hash = "sha256:ad4907858ed320a208e14ac037e4b9244ec1cb5aa54570518166ae8b25752cec"}, + {file = "langgraph_checkpoint-2.0.26.tar.gz", hash = "sha256:2b800195532d5efb079db9754f037281225ae175f7a395523f4bf41223cbc9d6"}, ] [package.dependencies] -langchain-core = ">=0.2.38,<0.4" +langchain-core = {version = ">=0.2.38", markers = "python_version < \"4.0\""} ormsgpack = ">=1.8.0,<2.0.0" [[package]] name = "langgraph-checkpoint-postgres" -version = "2.0.17" +version = "2.0.21" description = "Library with a Postgres implementation of LangGraph checkpoint saver." optional = false python-versions = "<4.0.0,>=3.9.0" groups = ["main"] files = [ - {file = "langgraph_checkpoint_postgres-2.0.17-py3-none-any.whl", hash = "sha256:b5b8a385b129d0395f19cb01cbe050c585eca951e995a984285d39fb8dab3e65"}, - {file = "langgraph_checkpoint_postgres-2.0.17.tar.gz", hash = "sha256:ab88e8c003833c68e1d5a2f9623535697fcd536507e2423d1ec1b27393ad41d6"}, + {file = "langgraph_checkpoint_postgres-2.0.21-py3-none-any.whl", hash = "sha256:f0a50f2c1496778e00ea888415521bb2b7789a12052aa5ae54d82cf517b271e8"}, + {file = "langgraph_checkpoint_postgres-2.0.21.tar.gz", hash = "sha256:921915fd3de534b4c84469f93d03046c1ef1f224e44629212b172ec3e9b72ded"}, ] [package.dependencies] -langgraph-checkpoint = ">=2.0.15,<3.0.0" +langgraph-checkpoint = ">=2.0.21,<3.0.0" orjson = ">=3.10.1" psycopg = ">=3.2.0,<4.0.0" psycopg-pool = ">=3.2.0,<4.0.0" [[package]] name = "langgraph-prebuilt" -version = "0.1.8" +version = "0.2.2" description = "Library with high-level APIs for creating and executing LangGraph agents and tools." 
optional = false -python-versions = "<4.0.0,>=3.9.0" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "langgraph_prebuilt-0.1.8-py3-none-any.whl", hash = "sha256:ae97b828ae00be2cefec503423aa782e1bff165e9b94592e224da132f2526968"}, - {file = "langgraph_prebuilt-0.1.8.tar.gz", hash = "sha256:4de7659151829b2b955b6798df6800e580e617782c15c2c5b29b139697491831"}, + {file = "langgraph_prebuilt-0.2.2-py3-none-any.whl", hash = "sha256:72de5ef1d969a8f02ad7adc7cc1915bb9b4467912d57ba60da34b5a70fdad1f6"}, + {file = "langgraph_prebuilt-0.2.2.tar.gz", hash = "sha256:0a5d1f651f97c848cd1c3dd0ef017614f47ee74effb7375b59ac639e41b253f9"}, ] [package.dependencies] -langchain-core = ">=0.2.43,<0.3.0 || >0.3.0,<0.3.1 || >0.3.1,<0.3.2 || >0.3.2,<0.3.3 || >0.3.3,<0.3.4 || >0.3.4,<0.3.5 || >0.3.5,<0.3.6 || >0.3.6,<0.3.7 || >0.3.7,<0.3.8 || >0.3.8,<0.3.9 || >0.3.9,<0.3.10 || >0.3.10,<0.3.11 || >0.3.11,<0.3.12 || >0.3.12,<0.3.13 || >0.3.13,<0.3.14 || >0.3.14,<0.3.15 || >0.3.15,<0.3.16 || >0.3.16,<0.3.17 || >0.3.17,<0.3.18 || >0.3.18,<0.3.19 || >0.3.19,<0.3.20 || >0.3.20,<0.3.21 || >0.3.21,<0.3.22 || >0.3.22,<0.4.0" -langgraph-checkpoint = ">=2.0.10,<3.0.0" +langchain-core = ">=0.3.22" +langgraph-checkpoint = ">=2.0.10" [[package]] name = "langgraph-sdk" @@ -2533,14 +2599,14 @@ pytest = ["pytest (>=7.0.0)", "rich (>=13.9.4,<14.0.0)"] [[package]] name = "loguru" -version = "0.7.2" +version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false -python-versions = ">=3.5" +python-versions = "<4.0,>=3.5" groups = ["main"] files = [ - {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, - {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, + {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, + {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, ] [package.dependencies] @@ -2548,7 +2614,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==7.2.5) ; python_version >= \"3.9\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.2.2) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "mypy (==v1.5.1) ; python_version >= \"3.8\"", "pre-commit (==3.4.0) ; python_version >= \"3.8\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==7.4.0) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==4.1.0) ; python_version >= \"3.8\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.0.0) ; python_version >= \"3.8\"", "sphinx-autobuild (==2021.3.14) ; python_version >= \"3.9\"", "sphinx-rtd-theme (==1.3.0) ; python_version >= \"3.9\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.11.0) ; python_version >= \"3.8\""] +dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < 
\"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] [[package]] name = "maison" @@ -2736,6 +2802,55 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "numexpr" +version = "2.10.2" +description = "Fast numerical expression evaluator for NumPy" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "numexpr-2.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b0e82d2109c1d9e63fcd5ea177d80a11b881157ab61178ddbdebd4c561ea46"}, + {file = "numexpr-2.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc2b8035a0c2cdc352e58c3875cb668836018065cbf5752cb531015d9a568d8"}, + {file = "numexpr-2.10.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0db5ff5183935d1612653559c319922143e8fa3019007696571b13135f216458"}, + {file = "numexpr-2.10.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15f59655458056fdb3a621b1bb8e071581ccf7e823916c7568bb7c9a3e393025"}, + {file = "numexpr-2.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ce8cccf944339051e44a49a124a06287fe3066d0acbff33d1aa5aee10a96abb7"}, + {file = "numexpr-2.10.2-cp310-cp310-win32.whl", hash = "sha256:ba85371c9a8d03e115f4dfb6d25dfbce05387002b9bc85016af939a1da9624f0"}, + {file = "numexpr-2.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:deb64235af9eeba59fcefa67e82fa80cfc0662e1b0aa373b7118a28da124d51d"}, + {file = "numexpr-2.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b360eb8d392483410fe6a3d5a7144afa298c9a0aa3e9fe193e89590b47dd477"}, + {file = "numexpr-2.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d9a42f5c24880350d88933c4efee91b857c378aaea7e8b86221fff569069841e"}, + {file = "numexpr-2.10.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83fcb11988b57cc25b028a36d285287d706d1f536ebf2662ea30bd990e0de8b9"}, + {file = "numexpr-2.10.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4213a92efa9770bc28e3792134e27c7e5c7e97068bdfb8ba395baebbd12f991b"}, + {file = "numexpr-2.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebdbef5763ca057eea0c2b5698e4439d084a0505d9d6e94f4804f26e8890c45e"}, + {file = "numexpr-2.10.2-cp311-cp311-win32.whl", hash = "sha256:3bf01ec502d89944e49e9c1b5cc7c7085be8ca2eb9dd46a0eafd218afbdbd5f5"}, + {file = "numexpr-2.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:e2d0ae24b0728e4bc3f1d3f33310340d67321d36d6043f7ce26897f4f1042db0"}, + {file = 
"numexpr-2.10.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5323a46e75832334f1af86da1ef6ff0add00fbacdd266250be872b438bdf2be"}, + {file = "numexpr-2.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a42963bd4c62d8afa4f51e7974debfa39a048383f653544ab54f50a2f7ec6c42"}, + {file = "numexpr-2.10.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5191ba8f2975cb9703afc04ae845a929e193498c0e8bcd408ecb147b35978470"}, + {file = "numexpr-2.10.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97298b14f0105a794bea06fd9fbc5c423bd3ff4d88cbc618860b83eb7a436ad6"}, + {file = "numexpr-2.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9d7805ccb6be2d3b0f7f6fad3707a09ac537811e8e9964f4074d28cb35543db"}, + {file = "numexpr-2.10.2-cp312-cp312-win32.whl", hash = "sha256:cb845b2d4f9f8ef0eb1c9884f2b64780a85d3b5ae4eeb26ae2b0019f489cd35e"}, + {file = "numexpr-2.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:57b59cbb5dcce4edf09cd6ce0b57ff60312479930099ca8d944c2fac896a1ead"}, + {file = "numexpr-2.10.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a37d6a51ec328c561b2ca8a2bef07025642eca995b8553a5267d0018c732976d"}, + {file = "numexpr-2.10.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81d1dde7dd6166d8ff5727bb46ab42a6b0048db0e97ceb84a121334a404a800f"}, + {file = "numexpr-2.10.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3f814437d5a10797f8d89d2037cca2c9d9fa578520fc911f894edafed6ea3e"}, + {file = "numexpr-2.10.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9309f2e43fe6e4560699ef5c27d7a848b3ff38549b6b57194207cf0e88900527"}, + {file = "numexpr-2.10.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ebb73b93f5c4d6994f357fa5a47a9f7a5485577e633b3c46a603cb01445bbb19"}, + {file = "numexpr-2.10.2-cp313-cp313-win32.whl", hash = "sha256:ec04c9a3c050c175348801e27c18c68d28673b7bfb865ef88ce333be523bbc01"}, + {file = "numexpr-2.10.2-cp313-cp313-win_amd64.whl", hash = "sha256:d7a3fc83c959288544db3adc70612475d8ad53a66c69198105c74036182d10dd"}, + {file = "numexpr-2.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0495f8111c3633e265248709b8b3b521bbfa646ba384909edd10e2b9a588a83a"}, + {file = "numexpr-2.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2aa05ac71bee3b1253e73173c4d7fa96a09a18970c0226f1c2c07a71ffe988dc"}, + {file = "numexpr-2.10.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3a23c3002ab330056fbdd2785871937a6f2f2fa85d06c8d0ff74ea8418119d1"}, + {file = "numexpr-2.10.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a018a7d81326f4c73d8b5aee61794d7d8514512f43957c0db61eb2a8a86848c7"}, + {file = "numexpr-2.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:037859b17a0abe2b489d4c2cfdadd2bf458ec80dd83f338ea5544c7987e06b85"}, + {file = "numexpr-2.10.2-cp39-cp39-win32.whl", hash = "sha256:eb278ccda6f893a312aa0452701bb17d098b7b14eb7c9381517d509cce0a39a3"}, + {file = "numexpr-2.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:734b64c6d6a597601ce9d0ef7b666e678ec015b446f1d1412c23903c021436c3"}, + {file = "numexpr-2.10.2.tar.gz", hash = "sha256:b0aff6b48ebc99d2f54f27b5f73a58cb92fde650aeff1b397c71c8788b4fff1a"}, +] + +[package.dependencies] +numpy = ">=1.23.0" + [[package]] name = "numpy" version = "1.26.4" @@ -3182,14 +3297,14 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" 
+python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -3234,7 +3349,6 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, ] @@ -3508,14 +3622,14 @@ files = [ [[package]] name = "psycopg" -version = "3.2.6" +version = "3.2.9" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"}, - {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"}, + {file = "psycopg-3.2.9-py3-none-any.whl", hash = "sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6"}, + {file = "psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700"}, ] [package.dependencies] @@ -3523,9 +3637,9 @@ typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.6) ; implementation_name != \"pypy\""] -c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.2.9) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.9) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] @@ -3629,52 +3743,58 @@ files = [ [[package]] name = "pyarrow" -version = "15.0.0" +version = "19.0.1" description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, - {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, - {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, - {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, - {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, - {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, - {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, - {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, + {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69"}, + {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad76aef7f5f7e4a757fddcdcf010a8290958f09e3470ea458c80d26f4316ae89"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03c9d6f2a3dffbd62671ca070f13fc527bb1867b4ec2b98c7eeed381d4f389a"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:65cf9feebab489b19cdfcfe4aa82f62147218558d8d3f0fc1e9dea0ab8e7905a"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:41f9706fbe505e0abc10e84bf3a906a1338905cbbcf1177b71486b03e6ea6608"}, + {file = "pyarrow-19.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6cb2335a411b713fdf1e82a752162f72d4a7b5dbc588e32aa18383318b05866"}, + {file = "pyarrow-19.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc55d71898ea30dc95900297d191377caba257612f384207fe9f8293b5850f90"}, + {file = "pyarrow-19.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7a544ec12de66769612b2d6988c36adc96fb9767ecc8ee0a4d270b10b1c51e00"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0148bb4fc158bfbc3d6dfe5001d93ebeed253793fff4435167f6ce1dc4bddeae"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f24faab6ed18f216a37870d8c5623f9c044566d75ec586ef884e13a02a9d62c5"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:4982f8e2b7afd6dae8608d70ba5bd91699077323f812a0448d8b7abdff6cb5d3"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:49a3aecb62c1be1d822f8bf629226d4a96418228a42f5b40835c1f10d42e4db6"}, + {file = 
"pyarrow-19.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:008a4009efdb4ea3d2e18f05cd31f9d43c388aad29c636112c2966605ba33466"}, + {file = "pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b"}, + {file = "pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832"}, + {file = "pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960"}, + {file = "pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c"}, + {file = "pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136"}, + {file = "pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef"}, + {file = "pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0"}, + {file = "pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8"}, + {file = "pyarrow-19.0.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b9766a47a9cb56fefe95cb27f535038b5a195707a08bf61b180e642324963b46"}, + {file = "pyarrow-19.0.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:6c5941c1aac89a6c2f2b16cd64fe76bcdb94b2b1e99ca6459de4e6f07638d755"}, + {file = 
"pyarrow-19.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd44d66093a239358d07c42a91eebf5015aa54fccba959db899f932218ac9cc8"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:335d170e050bcc7da867a1ed8ffb8b44c57aaa6e0843b156a501298657b1e972"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:1c7556165bd38cf0cd992df2636f8bcdd2d4b26916c6b7e646101aff3c16f76f"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:699799f9c80bebcf1da0983ba86d7f289c5a2a5c04b945e2f2bcf7e874a91911"}, + {file = "pyarrow-19.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8464c9fbe6d94a7fe1599e7e8965f350fd233532868232ab2596a71586c5a429"}, + {file = "pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e"}, ] -[package.dependencies] -numpy = ">=1.16.6,<2" +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] [[package]] name = "pyasn1" @@ -4949,7 +5069,7 @@ description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" and sys_platform != \"win32\" and sys_platform != \"cygwin\"" +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -4995,6 +5115,21 @@ dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +[[package]] +name = "validators" +version = "0.35.0" +description = "Python Data Validation for Humans™" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd"}, + {file = "validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a"}, +] + +[package.extras] +crypto-eth-addresses = ["eth-hash[pycryptodome] (>=0.7.0)"] + [[package]] name = "virtualenv" version = "20.25.0" @@ -5301,6 +5436,139 @@ files = [ {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] +[[package]] +name = "xxhash" +version = "3.5.0" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"}, + {file = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"}, + {file = "xxhash-3.5.0-cp310-cp310-win32.whl", hash = "sha256:61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"}, + {file = "xxhash-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"}, + {file = "xxhash-3.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"}, + {file = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"}, + {file = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"}, + {file = "xxhash-3.5.0-cp311-cp311-win32.whl", hash = "sha256:109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"}, + {file = "xxhash-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"}, + {file = "xxhash-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"}, + {file = "xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00"}, + {file = "xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e"}, + {file = "xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"}, + {file = "xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"}, + {file = "xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"}, + {file = "xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6"}, + {file = "xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c"}, + {file = "xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637"}, + {file = "xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43"}, + {file = "xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b"}, + {file = "xxhash-3.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6e5f70f6dca1d3b09bccb7daf4e087075ff776e3da9ac870f86ca316736bb4aa"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e76e83efc7b443052dd1e585a76201e40b3411fe3da7af4fe434ec51b2f163b"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33eac61d0796ca0591f94548dcfe37bb193671e0c9bcf065789b5792f2eda644"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ec70a89be933ea49222fafc3999987d7899fc676f688dd12252509434636622"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86b8e7f703ec6ff4f351cfdb9f428955859537125904aa8c963604f2e9d3e7"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0adfbd36003d9f86c8c97110039f7539b379f28656a04097e7434d3eaf9aa131"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:63107013578c8a730419adc05608756c3fa640bdc6abe806c3123a49fb829f43"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:683b94dbd1ca67557850b86423318a2e323511648f9f3f7b1840408a02b9a48c"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5d2a01dcce81789cf4b12d478b5464632204f4c834dc2d064902ee27d2d1f0ee"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:a9d360a792cbcce2fe7b66b8d51274ec297c53cbc423401480e53b26161a290d"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f0b48edbebea1b7421a9c687c304f7b44d0677c46498a046079d445454504737"}, + {file = "xxhash-3.5.0-cp37-cp37m-win32.whl", hash = "sha256:7ccb800c9418e438b44b060a32adeb8393764da7441eb52aa2aa195448935306"}, + {file = "xxhash-3.5.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:c3bc7bf8cb8806f8d1c9bf149c18708cb1c406520097d6b0a73977460ea03602"}, + {file = "xxhash-3.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74752ecaa544657d88b1d1c94ae68031e364a4d47005a90288f3bab3da3c970f"}, + {file = "xxhash-3.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dee1316133c9b463aa81aca676bc506d3f80d8f65aeb0bba2b78d0b30c51d7bd"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:602d339548d35a8579c6b013339fb34aee2df9b4e105f985443d2860e4d7ffaa"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:695735deeddfb35da1677dbc16a083445360e37ff46d8ac5c6fcd64917ff9ade"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1030a39ba01b0c519b1a82f80e8802630d16ab95dc3f2b2386a0b5c8ed5cbb10"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5bc08f33c4966f4eb6590d6ff3ceae76151ad744576b5fc6c4ba8edd459fdec"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160e0c19ee500482ddfb5d5570a0415f565d8ae2b3fd69c5dcfce8a58107b1c3"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f1abffa122452481a61c3551ab3c89d72238e279e517705b8b03847b1d93d738"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d5e9db7ef3ecbfc0b4733579cea45713a76852b002cf605420b12ef3ef1ec148"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:23241ff6423378a731d84864bf923a41649dc67b144debd1077f02e6249a0d54"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:82b833d5563fefd6fceafb1aed2f3f3ebe19f84760fdd289f8b926731c2e6e91"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a80ad0ffd78bef9509eee27b4a29e56f5414b87fb01a888353e3d5bda7038bd"}, + {file = "xxhash-3.5.0-cp38-cp38-win32.whl", hash = "sha256:50ac2184ffb1b999e11e27c7e3e70cc1139047e7ebc1aa95ed12f4269abe98d4"}, + {file = "xxhash-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:392f52ebbb932db566973693de48f15ce787cabd15cf6334e855ed22ea0be5b3"}, + {file = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"}, + {file = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl", 
hash = "sha256:6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"}, + {file = "xxhash-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"}, + {file = "xxhash-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"}, + {file = "xxhash-3.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b4154c00eb22e4d543f472cfca430e7962a0f1d0f3778334f2e08a7ba59363c"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d30bbc1644f726b825b3278764240f449d75f1a8bdda892e641d4a688b1494ae"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0b72f2423e2aa53077e54a61c28e181d23effeaafd73fcb9c494e60930c8e"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13de2b76c1835399b2e419a296d5b38dc4855385d9e96916299170085ef72f57"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0691bfcc4f9c656bcb96cc5db94b4d75980b9d5589f2e59de790091028580837"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:297595fe6138d4da2c8ce9e72a04d73e58725bb60f3a19048bc96ab2ff31c692"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1276d369452040cbb943300dc8abeedab14245ea44056a2943183822513a18"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2061188a1ba352fc699c82bff722f4baacb4b4b8b2f0c745d2001e56d0dfb514"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c384c434021e4f62b8d9ba0bc9467e14d394893077e2c66d826243025e1f81"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e6a4dd644d72ab316b580a1c120b375890e4c52ec392d4aef3c63361ec4d77d1"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"}, + {file = "xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f"}, +] + [[package]] name = "yamlfix" version = "1.16.0" @@ -5454,4 +5722,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "3f0b1890a850a10387b347751e01af37a449346449a3cc504fea672d3b8cd6c1" +content-hash = "1a7541968d9792593203b9021ce8e44dc75becfb2e41139e8c2f713b642e88d9" From 6b744d33cafee50d420dca463634ccb459255d06 Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 30 May 2025 16:05:43 -0300 Subject: [PATCH 089/181] fix imports and formatting to pass linting CI workflow --- backend/apps/chatbot/views.py | 48 +++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/backend/apps/chatbot/views.py b/backend/apps/chatbot/views.py index c20aa271..cfdc5c5f 100644 --- a/backend/apps/chatbot/views.py +++ b/backend/apps/chatbot/views.py @@ -17,18 +17,26 @@ from rest_framework.serializers import Serializer from rest_framework.views import APIView +from backend.apps.chatbot.database import ChatbotDatabase +from backend.apps.chatbot.feedback_sender import LangSmithFeedbackSender +from backend.apps.chatbot.models import Feedback, MessagePair, Thread +from backend.apps.chatbot.serializers import ( + FeedbackCreateSerializer, + FeedbackSerializer, + MessagePairSerializer, + ThreadSerializer, + UserMessageSerializer, +) from chatbot.assistants import SQLAssistant, SQLAssistantMessage, UserMessage -from .database import ChatbotDatabase -from .feedback_sender import LangSmithFeedbackSender -from .models import * -from .serializers import * ModelSerializer = TypeVar("ModelSerializer", bound=Serializer) + @cache def _get_feedback_sender() -> LangSmithFeedbackSender: return LangSmithFeedbackSender() + @cache def _get_sql_assistant() -> SQLAssistant: db_host = os.environ["DB_HOST"] @@ -61,9 +69,7 @@ def _get_sql_assistant() -> SQLAssistant: client=chroma_client, collection_name=chroma_collection, collection_metadata={"hnsw:space": "cosine"}, - embedding_function=OpenAIEmbeddings( - model="text-embedding-3-small" - ), + embedding_function=OpenAIEmbeddings(model="text-embedding-3-small"), ) else: vector_store = None @@ -71,10 +77,7 @@ def _get_sql_assistant() -> SQLAssistant: # Connection kwargs defined according to: # https://github.com/langchain-ai/langgraph/issues/2887 # https://langchain-ai.github.io/langgraph/how-tos/persistence_postgres - conn_kwargs = { - "autocommit": True, - "prepare_threshold": 0 - } + conn_kwargs = {"autocommit": True, "prepare_threshold": 0} pool = ConnectionPool( conninfo=db_url, @@ -82,19 +85,18 @@ def _get_sql_assistant() -> SQLAssistant: max_size=8, open=False, ) - pool.open() # TODO: where to close the pool? 
+ pool.open() # TODO: where to close the pool? checkpointer = PostgresSaver(pool) checkpointer.setup() assistant = SQLAssistant( - database=database, - checkpointer=checkpointer, - vector_store=vector_store + database=database, checkpointer=checkpointer, vector_store=vector_store ) return assistant + class ThreadListView(APIView): permission_classes = [IsAuthenticated] @@ -102,7 +104,8 @@ def get(self, request: Request) -> JsonResponse: """Retrieve all threads associated with the authenticated user. Args: - request (Request): A Django REST framework `Request` object containing the authenticated user. + request (Request): A Django REST framework `Request` object + containing the authenticated user. Returns: JsonResponse: A JSON response containing a list of serialized threads. @@ -115,7 +118,8 @@ def post(self, request: Request) -> JsonResponse: """Create a new thread for the authenticated user. Args: - request (Request): A Django REST framework `Request` object containing the authenticated user. + request (Request): A Django REST framework `Request` object + containing the authenticated user. Returns: JsonResponse: A JSON response containing the serialized newly created thread. @@ -124,6 +128,7 @@ def post(self, request: Request) -> JsonResponse: serializer = ThreadSerializer(thread) return JsonResponse(serializer.data, status=201) + class ThreadDetailView(APIView): permission_classes = [IsAuthenticated] @@ -142,6 +147,7 @@ def get(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: serializer = MessagePairSerializer(messages, many=True) return JsonResponse(serializer.data, safe=False) + class MessageListView(APIView): permission_classes = [IsAuthenticated] @@ -166,8 +172,7 @@ def post(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: assistant = _get_sql_assistant() assistant_response: SQLAssistantMessage = assistant.invoke( - message=user_message, - thread_id=thread_id + message=user_message, thread_id=thread_id ) # TODO (nice to have): stream results @@ -184,6 +189,7 @@ def post(self, request: Request, thread_id: uuid.UUID) -> JsonResponse: return JsonResponse(serializer.data, status=201) + class FeedbackListView(APIView): permission_classes = [IsAuthenticated] @@ -221,6 +227,7 @@ def put(self, request: Request, message_pair_id: uuid.UUID) -> JsonResponse: return JsonResponse(serializer.data, status=status) + class CheckpointListView(APIView): permission_classes = [IsAuthenticated] @@ -243,6 +250,7 @@ def delete(self, request: Request, thread_id: uuid.UUID) -> HttpResponse: except Exception: return HttpResponse("Error clearing checkpoint", status=500) + def _get_thread_by_id(thread_id: uuid.UUID) -> Thread: """Retrieve a `Thread` object by its ID. @@ -260,6 +268,7 @@ def _get_thread_by_id(thread_id: uuid.UUID) -> Thread: except Thread.DoesNotExist: raise exceptions.NotFound + def _get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: """Retrieve a `MessagePair` object by its ID. @@ -277,6 +286,7 @@ def _get_message_pair_by_id(message_pair_id: uuid.UUID) -> MessagePair: except MessagePair.DoesNotExist: raise exceptions.NotFound + def _validate(request: Request, model_serializer: Type[ModelSerializer]) -> ModelSerializer: """ Parse and validate the JSON payload from a request using a Django REST framework serializer. 
From dddbff64eed7b4174ff1dd61d7fea2a2f825126e Mon Sep 17 00:00:00 2001 From: vrtornisiello Date: Fri, 30 May 2025 16:57:10 -0300 Subject: [PATCH 090/181] fix imports and formatting to pass linting CI workflow --- .../0025_account_has_access_to_chatbot.py | 14 ++- backend/apps/chatbot/database/__init__.py | 3 + backend/apps/chatbot/database/database.py | 52 ++++++---- .../chatbot/database/metadata_formatter.py | 35 +++---- backend/apps/chatbot/feedback_sender.py | 29 +++--- .../apps/chatbot/migrations/0001_initial.py | 77 ++++++++++----- ...sync_status_feedback_synced_at_and_more.py | 21 +++-- .../0004_alter_feedback_sync_status.py | 13 ++- backend/apps/chatbot/models.py | 13 +-- backend/apps/chatbot/tests/test_endpoints.py | 94 ++++++++++--------- backend/apps/chatbot/urls.py | 18 ++-- 11 files changed, 219 insertions(+), 150 deletions(-) diff --git a/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py b/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py index 4c0f5dd7..d31d3119 100644 --- a/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py +++ b/backend/apps/account/migrations/0025_account_has_access_to_chatbot.py @@ -1,18 +1,22 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.21 on 2025-05-08 14:04 from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('account', '0024_role_description_en_role_description_es_and_more'), + ("account", "0024_role_description_en_role_description_es_and_more"), ] operations = [ migrations.AddField( - model_name='account', - name='has_access_to_chatbot', - field=models.BooleanField(default=False, help_text='Indica se o usuário tem acesso ao chatbot', verbose_name='Tem acesso ao chatbot'), + model_name="account", + name="has_access_to_chatbot", + field=models.BooleanField( + default=False, + help_text="Indica se o usuário tem acesso ao chatbot", + verbose_name="Tem acesso ao chatbot", + ), ), ] diff --git a/backend/apps/chatbot/database/__init__.py b/backend/apps/chatbot/database/__init__.py index 5de7b00e..9049dfb3 100644 --- a/backend/apps/chatbot/database/__init__.py +++ b/backend/apps/chatbot/database/__init__.py @@ -1 +1,4 @@ +# -*- coding: utf-8 -*- from .database import ChatbotDatabase + +__all__ = ["ChatbotDatabase"] diff --git a/backend/apps/chatbot/database/database.py b/backend/apps/chatbot/database/database.py index 9ecf184f..9664e772 100644 --- a/backend/apps/chatbot/database/database.py +++ b/backend/apps/chatbot/database/database.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import json import os @@ -7,7 +8,13 @@ from backend.apps.api.v1.models import Dataset -from .metadata_formatter import * +from .metadata_formatter import ( + ColumnMetadata, + DatasetMetadata, + MarkdownMetadataFormatter, + MetadataFormatter, + TableMetadata, +) class ChatbotDatabase: @@ -21,9 +28,11 @@ class ChatbotDatabase: Args: billing_project (str | None): - GCP project ID for billing. Falls back to the `BILLING_PROJECT_ID` environment variable if not provided. + GCP project ID for billing. Falls back to the `BILLING_PROJECT_ID` + environment variable if not provided. query_project (str | None): - GCP project ID for executing queries. Falls back to the `QUERY_PROJECT_ID` environment variable if not provided. + GCP project ID for executing queries. Falls back to the `QUERY_PROJECT_ID` + environment variable if not provided. metadata_formatter (MetadataFormatter | None): Custom formatter for metadata. Defaults to `MarkdownMetadataFormatter`. 
""" @@ -34,8 +43,8 @@ def __init__( query_project: str | None = None, metadata_formatter: MetadataFormatter | None = None, ): - billing_project = billing_project or os.getenv('BILLING_PROJECT_ID') - query_project = query_project or os.getenv('QUERY_PROJECT_ID') + billing_project = billing_project or os.getenv("BILLING_PROJECT_ID") + query_project = query_project or os.getenv("QUERY_PROJECT_ID") self._client = bq.Client(billing_project) self._project = query_project @@ -46,18 +55,17 @@ def __init__( self.formatter = MarkdownMetadataFormatter() @staticmethod - @cachetools.func.ttl_cache(ttl=60*60*24) + @cachetools.func.ttl_cache(ttl=60 * 60 * 24) def _get_metadata() -> list[DatasetMetadata]: """Fetch and return metadata for all datasets and their associated tables and columns. The metadata includes dataset and table IDs and descriptions, and column information such as name, type, and description. The result is cached for 24 hours. Returns: - list[DatasetMetadata]: A list of metadata objects, one for each dataset with at least one valid table. + list[DatasetMetadata]: A list of `DatasetMetadata` objects, + one for each dataset with at least one valid table. """ - datasets = Dataset.objects.prefetch_related( - "tables__cloud_tables__columns__bigquery_type" - ) + datasets = Dataset.objects.prefetch_related("tables__cloud_tables__columns__bigquery_type") datasets_metadata: list[DatasetMetadata] = [] @@ -84,7 +92,11 @@ def _get_metadata() -> list[DatasetMetadata]: for column in cloud_table.columns.all() ] - full_table_id = f"{cloud_table.gcp_project_id}.{cloud_table.gcp_dataset_id}.{cloud_table.gcp_table_id}" + full_table_id = ( + f"{cloud_table.gcp_project_id}." + f"{cloud_table.gcp_dataset_id}." + f"{cloud_table.gcp_table_id}" + ) tables_metadata.append( TableMetadata( @@ -92,7 +104,7 @@ def _get_metadata() -> list[DatasetMetadata]: full_table_id=full_table_id, name=table.name, description=table.description, - columns=columns_metadata + columns=columns_metadata, ) ) @@ -104,7 +116,7 @@ def _get_metadata() -> list[DatasetMetadata]: id=gcp_dataset_id, name=dataset.name, description=dataset.description, - tables=tables_metadata + tables=tables_metadata, ) ) @@ -117,8 +129,7 @@ def get_datasets_info(self) -> str: str: A formatted string containing metadata for the datasets. """ datasets_info = [ - self.formatter.format_dataset_metadata(dataset) - for dataset in self._get_metadata() + self.formatter.format_dataset_metadata(dataset) for dataset in self._get_metadata() ] return "\n\n---\n\n".join(datasets_info) @@ -134,15 +145,15 @@ def get_tables_info(self, dataset_names: str) -> str: """ dataset_ids = {id.strip() for id in dataset_names.split(",")} - datasets = [ - dataset for dataset in self._get_metadata() if dataset.id in dataset_ids - ] + datasets = [dataset for dataset in self._get_metadata() if dataset.id in dataset_ids] tables_info = [] for dataset in datasets: tables_info.append( - "\n\n".join([self.formatter.format_table_metadata(table) for table in dataset.tables]) + "\n\n".join( + [self.formatter.format_table_metadata(table) for table in dataset.tables] + ) ) return "\n\n---\n\n".join(tables_info) @@ -157,7 +168,8 @@ def query(self, query: str) -> str: Exception: Propagates any exceptions raised during query execution. Returns: - str: A JSON-formatted string representing the query results. Returns an empty string if no results are found. + str: A JSON-formatted string representing the query results. + Returns an empty string if no results are found. 
""" try: rows = self._client.query(query, project=self._project).result() diff --git a/backend/apps/chatbot/database/metadata_formatter.py b/backend/apps/chatbot/database/metadata_formatter.py index a2ebd266..80f9e0ed 100644 --- a/backend/apps/chatbot/database/metadata_formatter.py +++ b/backend/apps/chatbot/database/metadata_formatter.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from typing import Protocol from pydantic import BaseModel, ConfigDict, Field @@ -6,24 +7,30 @@ class Metadata(BaseModel): model_config = ConfigDict(str_strip_whitespace=True) + class ColumnMetadata(Metadata): name: str = Field(description="BigQuery column name") type: str = Field(description="BigQuery column name") - description: str|None = Field(default=None, description="BigQuery column description") + description: str | None = Field(default=None, description="BigQuery column description") + class TableMetadata(Metadata): id: str = Field(description="BigQuery table id") - full_table_id: str = Field(description="BigQuery table_id in the format project_id.dataset_id.table_id") + full_table_id: str = Field( + description="BigQuery table_id in the format project_id.dataset_id.table_id" + ) name: str = Field(description="Table name") - description: str|None = Field(default=None, description="Table description") + description: str | None = Field(default=None, description="Table description") columns: list[ColumnMetadata] = Field(description="List of columns for this table") + class DatasetMetadata(Metadata): id: str = Field(description="BigQuery dataset id") name: str = Field(description="Dataset name") - description: str|None = Field(default=None, description="Dataset description") + description: str | None = Field(default=None, description="Dataset description") tables: list[TableMetadata] = Field(description="List of tables for this dataset") + class MetadataFormatter(Protocol): @staticmethod def format_dataset_metadata(dataset: DatasetMetadata) -> str: @@ -33,6 +40,7 @@ def format_dataset_metadata(dataset: DatasetMetadata) -> str: def format_table_metadata(table: TableMetadata) -> str: ... 
+ class MarkdownMetadataFormatter: @staticmethod def format_dataset_metadata(dataset: DatasetMetadata) -> str: @@ -49,8 +57,7 @@ def format_dataset_metadata(dataset: DatasetMetadata) -> str: # Dataset tables tables_metadata = [ - f"- {table.full_table_id}: {table.description}" - for table in dataset.tables + f"- {table.full_table_id}: {table.description}" for table in dataset.tables ] metadata += "\n\n".join(tables_metadata) @@ -74,20 +81,16 @@ def format_table_metadata(table: TableMetadata) -> str: metadata += f"### Description:\n{table.description}\n\n" # Table schema - metadata += f"### Schema:\n" - fields = "\n\t".join([ - f"{field.name} {field.type}" - for field in table.columns - ]) + metadata += "### Schema:\n" + fields = "\n\t".join([f"{field.name} {field.type}" for field in table.columns]) metadata += f"CREATE TABLE {table.id} (\n\t{fields}\n)\n\n" # Table columns details - metadata += f"### Column Details:\n" + metadata += "### Column Details:\n" header = "|column name|column type|column description|\n|---|---|---|" - lines = "\n".join([ - f"|{field.name}|{field.type}|{field.description}|" - for field in table.columns - ]) + lines = "\n".join( + [f"|{field.name}|{field.type}|{field.description}|" for field in table.columns] + ) if lines: metadata += f"{header}\n{lines}" diff --git a/backend/apps/chatbot/feedback_sender.py b/backend/apps/chatbot/feedback_sender.py index a9dab149..f334a708 100644 --- a/backend/apps/chatbot/feedback_sender.py +++ b/backend/apps/chatbot/feedback_sender.py @@ -1,4 +1,5 @@ -from queue import Queue, Full +# -*- coding: utf-8 -*- +from queue import Full, Queue from threading import Thread import langsmith @@ -12,10 +13,7 @@ class LangSmithFeedbackSender: """A feedback sender that sends feedback to LangSmith using a background worker.""" def __init__(self, api_url: str | None = None, api_key: str | None = None): - self._langsmith_client = langsmith.Client( - api_url=api_url, - api_key=api_key - ) + self._langsmith_client = langsmith.Client(api_url=api_url, api_key=api_key) self._queue: Queue[tuple[Feedback, bool]] = Queue(maxsize=1000) @@ -37,15 +35,17 @@ def _create_langsmith_feedback(self, feedback: Feedback) -> bool: key="helpfulness", feedback_id=feedback.id, score=feedback.rating, - comment=feedback.comment + comment=feedback.comment, ) logger.info( - f"Successfully created feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith" + f"Successfully created feedback {feedback.id} " + f"for run {feedback.message_pair.id} on LangSmith" ) return True except Exception: logger.exception( - f"Failed to create feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith:" + f"Failed to create feedback {feedback.id} " + f"for run {feedback.message_pair.id} on LangSmith" ) return False @@ -60,17 +60,17 @@ def _update_langsmith_feedback(self, feedback: Feedback) -> bool: """ try: self._langsmith_client.update_feedback( - feedback_id=feedback.id, - score=feedback.rating, - comment=feedback.comment + feedback_id=feedback.id, score=feedback.rating, comment=feedback.comment ) logger.info( - f"Successfully updated feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith" + f"Successfully updated feedback {feedback.id} " + f"for run {feedback.message_pair.id} on LangSmith" ) return True except Exception: logger.exception( - f"Failed to update feedback {feedback.id} for run {feedback.message_pair.id} on LangSmith:" + f"Failed to update feedback {feedback.id} " + f"for run {feedback.message_pair.id} on LangSmith" ) return False @@ 
-107,5 +107,6 @@ def send_feedback(self, feedback: Feedback, created: bool): except Full: operation = "create" if created else "update" logger.warning( - f"LangSmith feedbacks queue is full - could not {operation} feedback {feedback.id} on LangSmith" + f"LangSmith feedbacks queue is full - could not {operation} " + f"feedback {feedback.id} on LangSmith" ) diff --git a/backend/apps/chatbot/migrations/0001_initial.py b/backend/apps/chatbot/migrations/0001_initial.py index ce94391e..e5747e4c 100644 --- a/backend/apps/chatbot/migrations/0001_initial.py +++ b/backend/apps/chatbot/migrations/0001_initial.py @@ -1,13 +1,14 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.20 on 2025-04-14 17:19 +import uuid + +import django.db.models.deletion from django.conf import settings from django.db import migrations, models -import django.db.models.deletion -import uuid class Migration(migrations.Migration): - initial = True dependencies = [ @@ -16,35 +17,65 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Thread', + name="Thread", fields=[ - ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "account", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='MessagePair', + name="MessagePair", fields=[ - ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ('model_uri', models.TextField()), - ('user_message', models.TextField()), - ('assistant_message', models.TextField()), - ('generated_queries', models.JSONField(blank=True, null=True)), - ('generated_chart', models.JSONField(blank=True, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('thread', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatbot.thread')), + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), + ), + ("model_uri", models.TextField()), + ("user_message", models.TextField()), + ("assistant_message", models.TextField()), + ("generated_queries", models.JSONField(blank=True, null=True)), + ("generated_chart", models.JSONField(blank=True, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "thread", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="chatbot.thread" + ), + ), ], ), migrations.CreateModel( - name='Feedback', + name="Feedback", fields=[ - ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ('rating', models.SmallIntegerField(choices=[(0, 'Bad'), (1, 'Good')])), - ('comment', models.TextField(blank=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('message_pair', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='chatbot.messagepair')), + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), + ), + ("rating", models.SmallIntegerField(choices=[(0, "Bad"), (1, "Good")])), + ("comment", models.TextField(blank=True)), + ("created_at", 
models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "message_pair", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, to="chatbot.messagepair" + ), + ), ], ), ] diff --git a/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py b/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py index 478de74c..ab1b220a 100644 --- a/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py +++ b/backend/apps/chatbot/migrations/0003_feedback_sync_status_feedback_synced_at_and_more.py @@ -1,28 +1,31 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.21 on 2025-05-08 17:05 from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('chatbot', '0002_alter_feedback_comment'), + ("chatbot", "0002_alter_feedback_comment"), ] operations = [ migrations.AddField( - model_name='feedback', - name='sync_status', - field=models.CharField(choices=[('pending', 'Pending'), ('success', 'Success'), ('failed', 'Failed')], default='pending'), + model_name="feedback", + name="sync_status", + field=models.CharField( + choices=[("pending", "Pending"), ("success", "Success"), ("failed", "Failed")], + default="pending", + ), ), migrations.AddField( - model_name='feedback', - name='synced_at', + model_name="feedback", + name="synced_at", field=models.DateTimeField(blank=True, null=True), ), migrations.AlterField( - model_name='feedback', - name='updated_at', + model_name="feedback", + name="updated_at", field=models.DateTimeField(blank=True, null=True), ), ] diff --git a/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py b/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py index 52e83aca..666e7dea 100644 --- a/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py +++ b/backend/apps/chatbot/migrations/0004_alter_feedback_sync_status.py @@ -1,18 +1,21 @@ +# -*- coding: utf-8 -*- # Generated by Django 4.2.21 on 2025-05-13 20:10 from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('chatbot', '0003_feedback_sync_status_feedback_synced_at_and_more'), + ("chatbot", "0003_feedback_sync_status_feedback_synced_at_and_more"), ] operations = [ migrations.AlterField( - model_name='feedback', - name='sync_status', - field=models.TextField(choices=[('pending', 'Pending'), ('success', 'Success'), ('failed', 'Failed')], default='pending'), + model_name="feedback", + name="sync_status", + field=models.TextField( + choices=[("pending", "Pending"), ("success", "Success"), ("failed", "Failed")], + default="pending", + ), ), ] diff --git a/backend/apps/chatbot/models.py b/backend/apps/chatbot/models.py index 56459368..679dcc94 100644 --- a/backend/apps/chatbot/models.py +++ b/backend/apps/chatbot/models.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import uuid -from typing import Any from django.db import models from django.utils import timezone @@ -13,6 +12,7 @@ class Thread(models.Model): account = models.ForeignKey(Account, on_delete=models.CASCADE) created_at = models.DateTimeField(auto_now_add=True) + class MessagePair(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) thread = models.ForeignKey(Thread, on_delete=models.CASCADE) @@ -23,6 +23,7 @@ class MessagePair(models.Model): generated_chart = models.JSONField(null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True) + class 
Feedback(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) message_pair = models.OneToOneField(MessagePair, on_delete=models.CASCADE, primary_key=False) @@ -31,16 +32,12 @@ class Feedback(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(null=True, blank=True) sync_status = models.TextField( - choices=[ - ("pending", "Pending"), - ("success", "Success"), - ("failed", "Failed") - ], - default="pending" + choices=[("pending", "Pending"), ("success", "Success"), ("failed", "Failed")], + default="pending", ) synced_at = models.DateTimeField(null=True, blank=True) - def user_update(self, data: dict[str, int|str]): + def user_update(self, data: dict[str, int | str]): for attr, value in data.items(): setattr(self, attr, value) self.updated_at = timezone.now() diff --git a/backend/apps/chatbot/tests/test_endpoints.py b/backend/apps/chatbot/tests/test_endpoints.py index 2eba5f44..c29e7e85 100644 --- a/backend/apps/chatbot/tests/test_endpoints.py +++ b/backend/apps/chatbot/tests/test_endpoints.py @@ -1,9 +1,12 @@ +# -*- coding: utf-8 -*- +import uuid + import pytest from rest_framework.test import APIClient from backend.apps.account.models import Account from backend.apps.chatbot import views -from backend.apps.chatbot.models import * +from backend.apps.chatbot.models import Feedback, MessagePair, Thread from chatbot.assistants import SQLAssistantMessage @@ -12,14 +15,12 @@ def __init__(self, *args, **kwargs): ... def invoke(self, *args, **kwargs): - return SQLAssistantMessage( - model_uri="google/gemini-2.0-flash", - content="mock response" - ) + return SQLAssistantMessage(model_uri="google/gemini-2.0-flash", content="mock response") def clear_thread(self, *args, **kwargs): ... + class MockLangSmithFeedbackSender: def __init__(self, *args, **kwargs): ... @@ -27,18 +28,22 @@ def __init__(self, *args, **kwargs): def send_feedback(self, *args, **kwargs): ... 
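One note before the fixtures: Feedback.user_update, added in the models hunk above, simply copies the submitted fields onto the row and stamps updated_at. A minimal sketch of how an update path can rely on it (hypothetical helper; the real FeedbackListView is not shown in this hunk):

    # Illustrative sketch only -- mirrors what a feedback update path can do with user_update().
    from django.shortcuts import get_object_or_404

    from backend.apps.chatbot.models import Feedback, MessagePair

    def update_existing_feedback(message_pair_id, data):
        message_pair = get_object_or_404(MessagePair, id=message_pair_id)
        feedback = Feedback.objects.get(message_pair=message_pair)
        feedback.user_update(data)  # e.g. {"rating": 1, "comment": "good"}; also sets updated_at = timezone.now()
        feedback.save()  # persist explicitly in case user_update() does not call save() itself
        return feedback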
+ @pytest.fixture def mock_email() -> str: return "mockemail@mockdomain.com" + @pytest.fixture def mock_password() -> str: return "mockpassword" + @pytest.fixture def client() -> APIClient: return APIClient() + @pytest.fixture def auth_user(mock_email: str, mock_password: str) -> Account: return Account.objects.create( @@ -48,28 +53,26 @@ def auth_user(mock_email: str, mock_password: str) -> Account: has_chatbot_access=True, ) + @pytest.fixture -def access_token( - client: APIClient, mock_email: str, mock_password: str, auth_user: Account -) -> str: +def access_token(client: APIClient, mock_email: str, mock_password: str, auth_user: Account) -> str: response = client.post( - path="/chatbot/token/", - data={"email": mock_email, "password": mock_password} + path="/chatbot/token/", data={"email": mock_email, "password": mock_password} ) assert response.status_code == 200 return response.data["access"] + @pytest.fixture def auth_client(access_token) -> APIClient: client = APIClient() client.credentials(HTTP_AUTHORIZATION=f"Bearer {access_token}") return client + @pytest.mark.django_db -def test_token_view_authorized( - client: APIClient, mock_email: str, mock_password: str -): +def test_token_view_authorized(client: APIClient, mock_email: str, mock_password: str): _ = Account.objects.create( email=mock_email, password=mock_password, @@ -78,16 +81,14 @@ def test_token_view_authorized( ) response = client.post( - path="/chatbot/token/", - data={"email": mock_email, "password": mock_password} + path="/chatbot/token/", data={"email": mock_email, "password": mock_password} ) assert response.status_code == 200 + @pytest.mark.django_db -def test_token_view_unauthorized( - client: APIClient, mock_email: str, mock_password: str -): +def test_token_view_unauthorized(client: APIClient, mock_email: str, mock_password: str): _ = Account.objects.create( email=mock_email, password=mock_password, @@ -96,29 +97,28 @@ def test_token_view_unauthorized( ) response = client.post( - path="/chatbot/token/", - data={"email": mock_email, "password": mock_password} + path="/chatbot/token/", data={"email": mock_email, "password": mock_password} ) assert response.status_code == 401 + @pytest.mark.django_db -def test_token_view_user_not_registered( - client: APIClient, mock_email: str, mock_password: str -): +def test_token_view_user_not_registered(client: APIClient, mock_email: str, mock_password: str): response = client.post( - path="/chatbot/token/", - data={"email": mock_email, "password": mock_password} + path="/chatbot/token/", data={"email": mock_email, "password": mock_password} ) assert response.status_code == 401 + @pytest.mark.django_db def test_thread_list_view_get(auth_client: APIClient): response = auth_client.get("/chatbot/threads/") assert response.status_code == 200 assert isinstance(response.json(), list) + @pytest.mark.django_db def test_thread_list_view_post(auth_client: APIClient): response = auth_client.post("/chatbot/threads/") @@ -131,6 +131,7 @@ def test_thread_list_view_post(auth_client: APIClient): assert "created_at" in thread_attrs assert Thread.objects.get(id=thread_attrs["id"]) + @pytest.mark.django_db def test_thread_detail_view_get(auth_client: APIClient, auth_user: Account): thread = Thread.objects.create(account=auth_user) @@ -139,11 +140,13 @@ def test_thread_detail_view_get(auth_client: APIClient, auth_user: Account): assert response.status_code == 200 assert isinstance(response.json(), list) + @pytest.mark.django_db def test_thread_detail_view_get_not_found(auth_client: APIClient): 
response = auth_client.get(f"/chatbot/threads/{uuid.uuid4()}/") assert response.status_code == 404 + @pytest.mark.django_db def test_message_list_view_post(monkeypatch, auth_client: APIClient, auth_user: Account): monkeypatch.setattr(views, "SQLAssistant", MockSQLAssistant) @@ -153,7 +156,7 @@ def test_message_list_view_post(monkeypatch, auth_client: APIClient, auth_user: response = auth_client.post( path=f"/chatbot/threads/{thread.id}/messages/", data={"id": str(uuid.uuid4()), "content": "mock message"}, - format="json" + format="json", ) assert response.status_code == 201 @@ -161,11 +164,12 @@ def test_message_list_view_post(monkeypatch, auth_client: APIClient, auth_user: response = auth_client.post( path=f"/chatbot/threads/{thread.id}/messages/", data={"content": "mock message"}, - format="json" + format="json", ) assert response.status_code == 201 + @pytest.mark.django_db def test_message_list_view_post_bad_request(auth_client: APIClient, auth_user: Account): thread = Thread.objects.create(account=auth_user) @@ -173,7 +177,7 @@ def test_message_list_view_post_bad_request(auth_client: APIClient, auth_user: A response = auth_client.post( path=f"/chatbot/threads/{thread.id}/messages/", data={"id": str(uuid.uuid4())}, - format="json" + format="json", ) assert response.status_code == 400 @@ -181,20 +185,22 @@ def test_message_list_view_post_bad_request(auth_client: APIClient, auth_user: A response = auth_client.post( path=f"/chatbot/threads/{thread.id}/messages/", data={"id": str(uuid.uuid4()), "content": []}, - format="json" + format="json", ) assert response.status_code == 400 + @pytest.mark.django_db def test_message_list_view_post_not_found(auth_client: APIClient): response = auth_client.post( path=f"/chatbot/threads/{uuid.uuid4()}/messages/", data={"id": str(uuid.uuid4()), "content": "mock message"}, - format="json" + format="json", ) assert response.status_code == 404 + @pytest.mark.django_db def test_feedback_list_view_put_create(monkeypatch, auth_client: APIClient, auth_user: Account): monkeypatch.setattr(views, "LangSmithFeedbackSender", MockLangSmithFeedbackSender) @@ -207,13 +213,14 @@ def test_feedback_list_view_put_create(monkeypatch, auth_client: APIClient, auth model_uri="google/gemini-2.0-flash", user_message="mock message", assistant_message="mock response", - ) for _ in range(2) + ) + for _ in range(2) ] response = auth_client.put( path=f"/chatbot/message-pairs/{message_pairs[0].id}/feedbacks/", data={"rating": 1, "comment": "good"}, - format="json" + format="json", ) assert response.status_code == 201 @@ -221,11 +228,12 @@ def test_feedback_list_view_put_create(monkeypatch, auth_client: APIClient, auth response = auth_client.put( path=f"/chatbot/message-pairs/{message_pairs[1].id}/feedbacks/", data={"rating": 1, "comment": None}, - format="json" + format="json", ) assert response.status_code == 201 + @pytest.mark.django_db def test_feedback_list_view_put_update(monkeypatch, auth_client: APIClient, auth_user: Account): monkeypatch.setattr(views, "LangSmithFeedbackSender", MockLangSmithFeedbackSender) @@ -236,23 +244,20 @@ def test_feedback_list_view_put_update(monkeypatch, auth_client: APIClient, auth thread=thread, model_uri="google/gemini-2.0-flash", user_message="mock message", - assistant_message="mock response" + assistant_message="mock response", ) - _ = Feedback.objects.create( - message_pair=message_pair, - rating=0, - comment="bad" - ) + _ = Feedback.objects.create(message_pair=message_pair, rating=0, comment="bad") response = auth_client.put( 
path=f"/chatbot/message-pairs/{message_pair.id}/feedbacks/", data={"rating": 1, "comment": "good"}, - format="json" + format="json", ) assert response.status_code == 200 + @pytest.mark.django_db def test_feedback_list_view_put_bad_request(auth_client: APIClient, auth_user: Account): thread = Thread.objects.create(account=auth_user) @@ -267,7 +272,7 @@ def test_feedback_list_view_put_bad_request(auth_client: APIClient, auth_user: A response = auth_client.put( path=f"/chatbot/message-pairs/{message_pair.id}/feedbacks/", data={"comment": "good"}, - format="json" + format="json", ) assert response.status_code == 400 @@ -275,27 +280,30 @@ def test_feedback_list_view_put_bad_request(auth_client: APIClient, auth_user: A response = auth_client.put( path=f"/chatbot/message-pairs/{message_pair.id}/feedbacks/", data={"rating": 1, "comment": []}, - format="json" + format="json", ) assert response.status_code == 400 + @pytest.mark.django_db def test_feedback_list_view_put_not_found(auth_client: APIClient, auth_user: Account): response = auth_client.put( path=f"/chatbot/message-pairs/{uuid.uuid4()}/feedbacks/", data={"rating": 1, "comment": "good"}, - format="json" + format="json", ) assert response.status_code == 404 + @pytest.mark.django_db def test_checkpoint_list_view_delete(auth_client: APIClient, auth_user: Account): thread = Thread.objects.create(account=auth_user) response = auth_client.delete(f"/chatbot/checkpoints/{thread.id}/") assert response.status_code == 200 + @pytest.mark.django_db def test_checkpoint_list_view_delete_not_found(auth_client: APIClient): response = auth_client.delete(f"/chatbot/checkpoints/{uuid.uuid4()}/") diff --git a/backend/apps/chatbot/urls.py b/backend/apps/chatbot/urls.py index a5c8f7bd..efe52aa8 100644 --- a/backend/apps/chatbot/urls.py +++ b/backend/apps/chatbot/urls.py @@ -1,17 +1,21 @@ # -*- coding: utf-8 -*- from django.urls import path -from rest_framework_simplejwt.views import (TokenObtainPairView, - TokenRefreshView) +from rest_framework_simplejwt.views import TokenObtainPairView, TokenRefreshView -from .views import (CheckpointListView, FeedbackListView, MessageListView, - ThreadDetailView, ThreadListView) +from .views import ( + CheckpointListView, + FeedbackListView, + MessageListView, + ThreadDetailView, + ThreadListView, +) urlpatterns = [ - path('chatbot/token/', TokenObtainPairView.as_view()), - path('chatbot/token/refresh/', TokenRefreshView.as_view()), + path("chatbot/token/", TokenObtainPairView.as_view()), + path("chatbot/token/refresh/", TokenRefreshView.as_view()), path("chatbot/threads/", ThreadListView.as_view()), path("chatbot/threads//", ThreadDetailView.as_view()), path("chatbot/threads//messages/", MessageListView.as_view()), path("chatbot/message-pairs//feedbacks/", FeedbackListView.as_view()), - path("chatbot/checkpoints//", CheckpointListView.as_view()) + path("chatbot/checkpoints//", CheckpointListView.as_view()), ] From f5f2a2313da9a0ca6a96e7c55f8bdcffc5142c13 Mon Sep 17 00:00:00 2001 From: aldemirlucas Date: Sat, 31 May 2025 20:40:46 -0300 Subject: [PATCH 091/181] =?UTF-8?q?feat:=20adicionar=20muta=C3=A7=C3=A3o?= =?UTF-8?q?=20para=20deletar=20assinatura=20Stripe=20imediatamente?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- backend/apps/account_payment/graphql.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/backend/apps/account_payment/graphql.py b/backend/apps/account_payment/graphql.py index 70e2e156..cc6b2dd3 100644 --- 
a/backend/apps/account_payment/graphql.py +++ b/backend/apps/account_payment/graphql.py @@ -358,6 +358,28 @@ def mutate(cls, root, info, subscription_id): return cls(errors=[str(e)]) +class StripeSubscriptionDeleteImmediatelyMutation(Mutation): + """Delete stripe subscription with immediate effect""" + + subscription = Field(StripeSubscriptionNode) + errors = List(String) + + class Arguments: + subscription_id = ID(required=True) + + @classmethod + @login_required + def mutate(cls, root, info, subscription_id): + try: + subscription = Subscription.objects.get(id=subscription_id) + stripe_subscription = subscription.subscription + stripe_subscription.cancel(at_period_end=False) + return None + except Exception as e: + logger.error(e) + return cls(errors=[str(e)]) + + class StripeSubscriptionCustomerCreateMutation(Mutation): """Add account to subscription""" @@ -553,6 +575,7 @@ class Mutation(ObjectType): update_stripe_customer = StripeCustomerUpdateMutation.Field() create_stripe_subscription = StripeSubscriptionCreateMutation.Field() delete_stripe_subscription = StripeSubscriptionDeleteMutation.Field() + delete_stripe_subscription_immediately = StripeSubscriptionDeleteImmediatelyMutation.Field() create_stripe_customer_subscription = StripeSubscriptionCustomerCreateMutation.Field() update_stripe_customer_subscription = StripeSubscriptionCustomerDeleteMutation.Field() delete_stripe_customer_all_members = StripeSubscriptionCustomerAllMembersDeleteMutation.Field() From 6b454d1b69d15fbfcc8bc673b19ad51f28c7085a Mon Sep 17 00:00:00 2001 From: Ricardo Dahis Date: Tue, 3 Jun 2025 11:23:00 +1000 Subject: [PATCH 092/181] increase dev ram memory limit to 1.5Gi --- .github/workflows/deploy-dev.yaml | 2 +- .github/workflows/release-dev.yaml | 6 ++---- .github/workflows/release-prod.yaml | 3 +-- .github/workflows/release-staging.yaml | 3 +-- docker-compose.override.yaml | 22 +++++++++------------- docker-compose.yaml | 20 ++++++++------------ 6 files changed, 22 insertions(+), 34 deletions(-) diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index ac3247af..f80bfdb3 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -54,7 +54,7 @@ jobs: resources: limits: cpu: 500m - memory: 1Gi + memory: 1.5Gi requests: cpu: 250m memory: 500Mi diff --git a/.github/workflows/release-dev.yaml b/.github/workflows/release-dev.yaml index 9cce6d45..93877463 100644 --- a/.github/workflows/release-dev.yaml +++ b/.github/workflows/release-dev.yaml @@ -2,8 +2,7 @@ name: Release Image (Development) on: push: - branches: - - dev + branches: [dev] jobs: release-docker: name: Release Image @@ -36,6 +35,5 @@ jobs: labels: | org.opencontainers.image.source=${{ github.event.repository.html_url }} org.opencontainers.image.revision=${{ github.sha }} - build-args: | + build-args: |- BUILDKIT_INLINE_CACHE=1 - diff --git a/.github/workflows/release-prod.yaml b/.github/workflows/release-prod.yaml index 7bc54289..745ac762 100644 --- a/.github/workflows/release-prod.yaml +++ b/.github/workflows/release-prod.yaml @@ -2,8 +2,7 @@ name: Release Image (Production) on: push: - branches: - - main + branches: [main] jobs: release-docker: name: Release Image diff --git a/.github/workflows/release-staging.yaml b/.github/workflows/release-staging.yaml index 5912855b..0ddc6d8f 100644 --- a/.github/workflows/release-staging.yaml +++ b/.github/workflows/release-staging.yaml @@ -2,8 +2,7 @@ name: Release Image (Staging) on: push: - branches: - - staging + branches: [staging] jobs: 
release-docker: name: Release Image diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 690e27c2..036507fb 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -1,6 +1,7 @@ +--- services: index: - image: elasticsearch:8.17.2 # TODO : upgrade prod to elasticsearch:8.17.2 + image: elasticsearch:8.17.2 # TODO : upgrade prod to elasticsearch:8.17.2 container_name: index environment: - cluster.name=docker-cluster @@ -44,11 +45,9 @@ services: start_period: 1m restart: unless-stopped vector-database: - image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 - ports: - - 8001:8000 - volumes: - - chroma_data:/chroma/chroma + image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 + ports: [8001:8000] + volumes: [chroma_data:/chroma/chroma] healthcheck: test: curl -f http://localhost:8000/api/v2/heartbeat || exit 1 restart: unless-stopped @@ -58,13 +57,11 @@ services: dockerfile: Dockerfile container_name: api env_file: [.env.docker] - command: ["/app/start-server-dev.sh"] - volumes: - - .:/app - - $HOME/.config/pydata:$HOME/.config/pydata + command: [/app/start-server-dev.sh] + volumes: [.:/app, $HOME/.config/pydata:$HOME/.config/pydata] ports: - - "8000:8000" # Porta da api - - "5678:5678" # Porta de debug + - 8000:8000 # Porta da api + - 5678:5678 # Porta de debug depends_on: index: condition: service_healthy @@ -81,7 +78,6 @@ services: retries: 5 start_period: 30s restart: unless-stopped - volumes: esdata: pgdata: diff --git a/docker-compose.yaml b/docker-compose.yaml index 8360f37d..3ec7bbba 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,6 +1,7 @@ +--- services: index: - image: elasticsearch:8.17.2 # TODO : upgrade prod to elasticsearch:8.17.2 + image: elasticsearch:8.17.2 # TODO : upgrade prod to elasticsearch:8.17.2 container_name: index environment: - cluster.name=docker-cluster @@ -44,11 +45,9 @@ services: start_period: 1m restart: unless-stopped vector-database: - image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 - ports: - - 8001:8000 - volumes: - - chroma_data:/chroma/chroma + image: chromadb/chroma:0.6.3 # chromadb version that gets installed with langchain-chroma==0.2.2 + ports: [8001:8000] + volumes: [chroma_data:/chroma/chroma] healthcheck: test: curl -f http://localhost:8000/api/v2/heartbeat || exit 1 restart: unless-stopped @@ -58,12 +57,10 @@ services: dockerfile: Dockerfile container_name: api env_file: [.env.docker] - volumes: - - .:/app - - $HOME/.config/pydata:$HOME/.config/pydata + volumes: [.:/app, $HOME/.config/pydata:$HOME/.config/pydata] ports: - - "8000:8000" # Porta da api - - "5678:5678" # Porta de debug + - 8000:8000 # Porta da api + - 5678:5678 # Porta de debug depends_on: index: condition: service_healthy @@ -80,7 +77,6 @@ services: retries: 5 start_period: 30s restart: unless-stopped - volumes: esdata: pgdata: From 4bb0b82e0ddb59f40c4cf166a3e39c2d5986b5b5 Mon Sep 17 00:00:00 2001 From: Luiz Eduardo Date: Tue, 3 Jun 2025 16:13:38 -0300 Subject: [PATCH 093/181] build: increase dev ram memory limit to 1536Mi (k8s) (#833) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This pull request increases the memory (RAM) limit of the dev pod on Google Kubernetes Engine (GKE) to accommodate the new applications.
With the addition of these applications, more resources proved necessary to ensure adequate performance and to avoid problems such as failures caused by excessive memory consumption. --- .github/workflows/deploy-dev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml index a921f7cb..25247317 100644 --- a/.github/workflows/deploy-dev.yaml +++ b/.github/workflows/deploy-dev.yaml @@ -56,7 +56,7 @@ jobs: resources: limits: cpu: 500m - memory: 1Gi + memory: 1536Mi requests: cpu: 250m memory: 500Mi From 99d578e02e5d5a9804011d214c18af92ae136d17 Mon Sep 17 00:00:00 2001 From: Luiz Eduardo Date: Tue, 3 Jun 2025 16:24:37 -0300 Subject: [PATCH 094/181] =?UTF-8?q?fix:=20mudar=20nome=20do=20bot=C3=A3o?= =?UTF-8?q?=20Ferramentas=20para=20Importar=20Colunas=20(#834)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Renaming the button previously called "Ferramentas" to "Importar Colunas" --- backend/templates/admin/change_form.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/templates/admin/change_form.html b/backend/templates/admin/change_form.html index c559e2ea..da4eed7c 100644 --- a/backend/templates/admin/change_form.html +++ b/backend/templates/admin/change_form.html @@ -4,7 +4,7 @@ {% if opts.model_name == 'table' %} {% endif %} @@ -15,7 +15,7 @@