From cbdaf354bcc45b4664b537b81b693f2594b4843a Mon Sep 17 00:00:00 2001 From: Val Brodsky Date: Thu, 12 Sep 2024 10:12:58 -0700 Subject: [PATCH 1/3] Followup update for the feature schema test --- libs/labelbox/tests/integration/test_feature_schema.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/libs/labelbox/tests/integration/test_feature_schema.py b/libs/labelbox/tests/integration/test_feature_schema.py index 1dc940f08..d593ba9d6 100644 --- a/libs/labelbox/tests/integration/test_feature_schema.py +++ b/libs/labelbox/tests/integration/test_feature_schema.py @@ -107,9 +107,15 @@ def test_updates_a_feature_schema(client, feature_schema): assert updated_feature_schema.normalized["name"] == "new name" +<<<<<<< HEAD def test_does_not_include_used_feature_schema(client): tool = client.upsert_feature_schema(point.asdict()) feature_schema_id = tool.normalized["featureSchemaId"] +======= +def test_does_not_include_used_feature_schema(client, feature_schema): + tool = feature_schema + feature_schema_id = tool.normalized['featureSchemaId'] +>>>>>>> b54a37e7 (Followup update for the feature schema test) ontology = client.create_ontology_from_feature_schemas( name="ontology name", feature_schema_ids=[feature_schema_id], @@ -120,4 +126,3 @@ def test_does_not_include_used_feature_schema(client): assert feature_schema_id not in unused_feature_schemas client.delete_unused_ontology(ontology.uid) - client.delete_unused_feature_schema(feature_schema_id) From cc423a60121e3ad76a0d8e564d51f9b339b2d0f9 Mon Sep 17 00:00:00 2001 From: Val Brodsky Date: Thu, 12 Sep 2024 10:13:55 -0700 Subject: [PATCH 2/3] Fix to delete feature schema resources Refactor teardown handling into a separate helper Removed unused fixture --- libs/labelbox/tests/conftest.py | 195 ++++++++++++------ .../tests/data/annotation_import/conftest.py | 55 ++--- .../data/annotation_import/test_model_run.py | 19 +- libs/labelbox/tests/data/export/conftest.py | 20 +- .../tests/data/test_data_row_metadata.py | 15 -- libs/labelbox/tests/integration/conftest.py | 15 +- .../tests/integration/test_feature_schema.py | 22 +- 7 files changed, 181 insertions(+), 160 deletions(-) diff --git a/libs/labelbox/tests/conftest.py b/libs/labelbox/tests/conftest.py index 446db396b..7968e93e4 100644 --- a/libs/labelbox/tests/conftest.py +++ b/libs/labelbox/tests/conftest.py @@ -7,7 +7,9 @@ import re import uuid import time +from labelbox.schema.project import Project import requests +from labelbox.schema.ontology import Ontology import pytest from types import SimpleNamespace from typing import Type @@ -23,21 +25,11 @@ from labelbox.schema.queue_mode import QueueMode from labelbox import Client -from labelbox import Dataset, DataRow from labelbox import LabelingFrontend -from labelbox import OntologyBuilder, Tool, Option, Classification, MediaType -from labelbox.orm import query -from labelbox.pagination import PaginatedCollection +from labelbox import OntologyBuilder, Tool, Option, Classification from labelbox.schema.annotation_import import LabelImport -from labelbox.schema.catalog import Catalog from labelbox.schema.enums import AnnotationImportState -from labelbox.schema.invite import Invite -from labelbox.schema.quality_mode import QualityMode -from labelbox.schema.queue_mode import QueueMode -from labelbox.schema.user import User from labelbox.exceptions import LabelboxError -from contextlib import suppress -from labelbox import Client IMG_URL = "https://picsum.photos/200/300.jpg" MASKABLE_IMG_URL = 
"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg" @@ -635,20 +627,17 @@ def organization(client): @pytest.fixture -def configured_project_with_label( - client, - rand_gen, - image_url, - project, - dataset, - data_row, - wait_for_label_processing, -): +def configured_project_with_label(client, rand_gen, dataset, data_row, + wait_for_label_processing, teardown_helpers): """Project with a connected dataset, having one datarow + Project contains an ontology with 1 bbox tool Additionally includes a create_label method for any needed extra labels One label is already created and yielded when using fixture """ + project = client.create_project(name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image) project._wait_until_data_rows_are_processed( data_row_ids=[data_row.uid], wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS, @@ -666,8 +655,7 @@ def configured_project_with_label( ) yield [project, dataset, data_row, label] - for label in project.labels(): - label.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) def _create_label(project, data_row, ontology, wait_for_label_processing): @@ -735,14 +723,17 @@ def big_dataset(dataset: Dataset): @pytest.fixture -def configured_batch_project_with_label( - project, dataset, data_row, wait_for_label_processing -): +def configured_batch_project_with_label(client, dataset, data_row, + wait_for_label_processing, rand_gen, + teardown_helpers): """Project with a batch having one datarow Project contains an ontology with 1 bbox tool Additionally includes a create_label method for any needed extra labels One label is already created and yielded when using fixture """ + project = client.create_project(name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image) data_rows = [dr.uid for dr in list(dataset.data_rows())] project._wait_until_data_rows_are_processed( data_row_ids=data_rows, sleep_interval=3 @@ -757,18 +748,20 @@ def configured_batch_project_with_label( yield [project, dataset, data_row, label] - for label in project.labels(): - label.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) @pytest.fixture -def configured_batch_project_with_multiple_datarows( - project, dataset, data_rows, wait_for_label_processing -): +def configured_batch_project_with_multiple_datarows(client, dataset, data_rows, + wait_for_label_processing, + rand_gen, teardown_helpers): """Project with a batch having multiple datarows Project contains an ontology with 1 bbox tool Additionally includes a create_label method for any needed extra labels """ + project = client.create_project(name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image) global_keys = [dr.global_key for dr in data_rows] batch_name = f"batch {uuid.uuid4()}" @@ -780,26 +773,7 @@ def configured_batch_project_with_multiple_datarows( yield [project, dataset, data_rows] - for label in project.labels(): - label.delete() - - -@pytest.fixture -def configured_batch_project_for_labeling_service( - project, data_row_and_global_key -): - """Project with a batch having multiple datarows - Project contains an ontology with 1 bbox tool - Additionally includes a create_label method for any needed extra labels - """ - global_keys = [data_row_and_global_key[1]] - - batch_name = f"batch {uuid.uuid4()}" - project.create_batch(batch_name, global_keys=global_keys) - - _setup_ontology(project) - - yield project + 
teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)
 
 
 # NOTE this is nice heuristics, also there is this logic _wait_until_data_rows_are_processed in Project
@@ -1061,14 +1035,11 @@ def project_with_empty_ontology(project):
 
 
 @pytest.fixture
-def configured_project_with_complex_ontology(
-    client, initial_dataset, rand_gen, image_url
-):
-    project = client.create_project(
-        name=rand_gen(str),
-        queue_mode=QueueMode.Batch,
-        media_type=MediaType.Image,
-    )
+def configured_project_with_complex_ontology(client, initial_dataset, rand_gen,
+                                             image_url, teardown_helpers):
+    project = client.create_project(name=rand_gen(str),
+                                    queue_mode=QueueMode.Batch,
+                                    media_type=MediaType.Image)
     dataset = initial_dataset
     data_row = dataset.create_data_row(row_data=image_url)
     data_row_ids = [data_row.uid]
@@ -1127,7 +1098,7 @@ def configured_project_with_complex_ontology(
     project.setup(editor, ontology.asdict())
 
     yield [project, data_row]
-    project.delete()
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)
 
 
 @pytest.fixture
@@ -1146,13 +1117,13 @@ def valid_model_id():
 
 
 @pytest.fixture
-def requested_labeling_service(
-    rand_gen,
-    live_chat_evaluation_project_with_new_dataset,
-    chat_evaluation_ontology,
-    model_config,
-):
-    project = live_chat_evaluation_project_with_new_dataset
+def requested_labeling_service(rand_gen, client, chat_evaluation_ontology,
+                               model_config, teardown_helpers):
+    project_name = f"test-model-evaluation-project-{rand_gen(str)}"
+    dataset_name = f"test-model-evaluation-dataset-{rand_gen(str)}"
+    project = client.create_model_evaluation_project(name=project_name,
+                                                     dataset_name=dataset_name,
+                                                     data_row_count=1)
     project.connect_ontology(chat_evaluation_ontology)
 
     project.upsert_instructions("tests/integration/media/sample_pdf.pdf")
@@ -1164,3 +1135,95 @@ def requested_labeling_service(
     labeling_service.request()
 
     yield project, project.get_labeling_service()
+
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)
+
+
+class TearDownHelpers:
+
+    @staticmethod
+    def teardown_project_labels_ontology_feature_schemas(project: Project):
+        """
+        Call this function in fixture teardown to release a project, its labels, its ontology and their feature schemas
+
+        NOTE: cleanup errors are logged rather than raised so that fixture teardown never fails a test
+        """
+        ontology = project.ontology()
+        ontology_id = ontology.uid
+        client = project.client
+        classification_feature_schema_ids = [
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+        ]
+        tool_feature_schema_ids = [
+            feature["featureSchemaId"] for feature in ontology.normalized["tools"]
+        ]
+
+        feature_schema_ids = classification_feature_schema_ids + tool_feature_schema_ids
+        labels = list(project.labels())
+        for label in labels:
+            label.delete()
+
+        project.delete()
+        client.delete_unused_ontology(ontology_id)
+        for feature_schema_id in feature_schema_ids:
+            try:
+                project.client.delete_unused_feature_schema(feature_schema_id)
+            except LabelboxError as e:
+                print(
+                    f"Failed to delete feature schema {feature_schema_id}: {e}")
+
+    @staticmethod
+    def teardown_ontology_feature_schemas(ontology: Ontology):
+        """
+        Call this function in fixture teardown to release an ontology and its feature schemas
+
+        NOTE: cleanup errors are logged rather than raised so that fixture teardown never fails a test
+        """
+        ontology_id = ontology.uid
+        client = ontology.client
+        classification_feature_schema_ids = [
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+        ] + [
+            option["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+            for option in feature.get("options", [])
+        ]
+
+        tool_feature_schema_ids = [
+            feature["featureSchemaId"] for feature in ontology.normalized["tools"]
+        ] + [
+            classification["featureSchemaId"]
+            for tool in ontology.normalized["tools"]
+            for classification in tool.get("classifications", [])
+        ] + [
+            option["featureSchemaId"]
+            for tool in ontology.normalized["tools"]
+            for classification in tool.get("classifications", [])
+            for option in classification.get("options", [])
+        ]
+
+        feature_schema_ids = classification_feature_schema_ids + tool_feature_schema_ids
+
+        client.delete_unused_ontology(ontology_id)
+        for feature_schema_id in feature_schema_ids:
+            try:
+                client.delete_unused_feature_schema(feature_schema_id)
+            except LabelboxError as e:
+                print(
+                    f"Failed to delete feature schema {feature_schema_id}: {e}")
+
+
+class ModuleTearDownHelpers(TearDownHelpers):
+    ...
+
+
+@pytest.fixture
+def teardown_helpers():
+    return TearDownHelpers()
+
+
+@pytest.fixture(scope='module')
+def module_teardown_helpers():
+    return TearDownHelpers()
diff --git a/libs/labelbox/tests/data/annotation_import/conftest.py b/libs/labelbox/tests/data/annotation_import/conftest.py
index 39cede0bb..4b88a5499 100644
--- a/libs/labelbox/tests/data/annotation_import/conftest.py
+++ b/libs/labelbox/tests/data/annotation_import/conftest.py
@@ -1,4 +1,3 @@
-import itertools
 import uuid
 
 from labelbox.schema.model_run import ModelRun
@@ -14,8 +13,6 @@
 from typing import Tuple, Type
 from labelbox.schema.annotation_import import LabelImport, AnnotationImportState
 from pytest import FixtureRequest
-from contextlib import suppress
-
 """
 The main fixtures of this library are configured_project and configured_project_by_global_key. Both fixtures generate data rows with a parametrize media type. They create the amount of data rows equal to the DATA_ROW_COUNT variable below.
 The data rows are generated with a factory fixture that returns a function that allows you to pass a global key. The ontologies are generated normalized and based on the MediaType given (i.e. only features supported by MediaType are created).
 This ontology is later used to obtain the correct annotations with the prediction_id_mapping and corresponding inferences. Each data row will have all possible annotations attached supported for the MediaType.
 """
@@ -719,7 +716,6 @@ def _create_project(
     )
 
     project.connect_ontology(ontology)
-
     data_row_data = []
 
     for _ in range(DATA_ROW_COUNT):
@@ -744,15 +740,12 @@ def _create_project(
 
 
 @pytest.fixture
-def configured_project(
-    client: Client,
-    rand_gen,
-    data_row_json_by_media_type,
-    request: FixtureRequest,
-    normalized_ontology_by_media_type,
-    export_v2_test_helpers,
-    llm_prompt_response_creation_dataset_with_data_row,
-):
+def configured_project(client: Client, rand_gen, data_row_json_by_media_type,
+                       request: FixtureRequest,
+                       normalized_ontology_by_media_type,
+                       export_v2_test_helpers,
+                       llm_prompt_response_creation_dataset_with_data_row,
+                       teardown_helpers):
     """Configure project for test. Request.param will contain the media type if not present will use Image MediaType. 
The project will have 10 data rows.""" media_type = getattr(request, "param", MediaType.Image) @@ -789,23 +782,18 @@ def configured_project( yield project - project.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) if dataset: dataset.delete() - client.delete_unused_ontology(ontology.uid) - @pytest.fixture() -def configured_project_by_global_key( - client: Client, - rand_gen, - data_row_json_by_media_type, - request: FixtureRequest, - normalized_ontology_by_media_type, - export_v2_test_helpers, -): +def configured_project_by_global_key(client: Client, rand_gen, + data_row_json_by_media_type, + request: FixtureRequest, + normalized_ontology_by_media_type, + export_v2_test_helpers, teardown_helpers): """Does the same thing as configured project but with global keys focus.""" media_type = getattr(request, "param", MediaType.Image) @@ -841,22 +829,16 @@ def configured_project_by_global_key( yield project - project.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) if dataset: dataset.delete() - client.delete_unused_ontology(ontology.uid) - @pytest.fixture(scope="module") -def module_project( - client: Client, - rand_gen, - data_row_json_by_media_type, - request: FixtureRequest, - normalized_ontology_by_media_type, -): +def module_project(client: Client, rand_gen, data_row_json_by_media_type, + request: FixtureRequest, normalized_ontology_by_media_type, + module_teardown_helpers): """Generates a image project that scopes to the test module(file). Used to reduce api calls.""" media_type = getattr(request, "param", MediaType.Image) @@ -889,13 +871,12 @@ def module_project( yield project - project.delete() + module_teardown_helpers.teardown_project_labels_ontology_feature_schemas( + project) if dataset: dataset.delete() - client.delete_unused_ontology(ontology.uid) - @pytest.fixture def prediction_id_mapping(request, normalized_ontology_by_media_type): diff --git a/libs/labelbox/tests/data/annotation_import/test_model_run.py b/libs/labelbox/tests/data/annotation_import/test_model_run.py index 9eca28429..54eb94d27 100644 --- a/libs/labelbox/tests/data/annotation_import/test_model_run.py +++ b/libs/labelbox/tests/data/annotation_import/test_model_run.py @@ -7,13 +7,22 @@ from labelbox import DataSplit, ModelRun -@pytest.mark.order(1) -def test_model_run(client, configured_project_with_label, data_row, rand_gen): +@pytest.fixture +def current_model(client, configured_project_with_label, rand_gen): project, _, _, label = configured_project_with_label - label_id = label.uid ontology = project.ontology() - data = {"name": rand_gen(str), "ontology_id": ontology.uid} - model = client.create_model(data["name"], data["ontology_id"]) + + model = client.create_model(rand_gen(str), ontology.uid) + yield model + + model.delete() + + +def test_model_run(client, configured_project_with_label, current_model, + data_row, rand_gen): + _, _, _, label = configured_project_with_label + label_id = label.uid + model = current_model name = rand_gen(str) config = {"batch_size": 100, "reruns": None} diff --git a/libs/labelbox/tests/data/export/conftest.py b/libs/labelbox/tests/data/export/conftest.py index 0836c2b9e..e527e7e94 100644 --- a/libs/labelbox/tests/data/export/conftest.py +++ b/libs/labelbox/tests/data/export/conftest.py @@ -241,9 +241,8 @@ def polygon_inference(prediction_id_mapping): @pytest.fixture -def configured_project_with_ontology( - client, initial_dataset, ontology, rand_gen, image_url -): +def 
configured_project_with_ontology(client, initial_dataset, ontology, + rand_gen, image_url, teardown_helpers): dataset = initial_dataset project = client.create_project( name=rand_gen(str), @@ -264,22 +263,21 @@ def configured_project_with_ontology( ) project.data_row_ids = data_row_ids yield project - project.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) @pytest.fixture -def configured_project_without_data_rows(client, ontology, rand_gen): - project = client.create_project( - name=rand_gen(str), - description=rand_gen(str), - queue_mode=QueueMode.Batch, - ) +def configured_project_without_data_rows(client, ontology, rand_gen, + teardown_helpers): + project = client.create_project(name=rand_gen(str), + description=rand_gen(str), + queue_mode=QueueMode.Batch) editor = list( client.get_labeling_frontends(where=LabelingFrontend.name == "editor") )[0] project.setup(editor, ontology) yield project - project.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) @pytest.fixture diff --git a/libs/labelbox/tests/data/test_data_row_metadata.py b/libs/labelbox/tests/data/test_data_row_metadata.py index 9a3690776..891cab9be 100644 --- a/libs/labelbox/tests/data/test_data_row_metadata.py +++ b/libs/labelbox/tests/data/test_data_row_metadata.py @@ -92,21 +92,6 @@ def make_named_metadata(dr_id) -> DataRowMetadata: return metadata -@pytest.mark.skip(reason="broken export v1 api, to be retired soon") -def test_export_empty_metadata( - client, configured_project_with_label, wait_for_data_row_processing -): - project, _, data_row, _ = configured_project_with_label - data_row = wait_for_data_row_processing(client, data_row) - - export_task = project.export(params={"metadata_fields": True}) - export_task.wait_till_done() - stream = export_task.get_buffered_stream() - data_row = [data_row.json for data_row in stream][0] - - assert data_row["metadata_fields"] == [] - - def test_bulk_export_datarow_metadata(data_row, mdo: DataRowMetadataOntology): metadata = make_metadata(data_row.uid) mdo.bulk_upsert([metadata]) diff --git a/libs/labelbox/tests/integration/conftest.py b/libs/labelbox/tests/integration/conftest.py index d37287fe8..836b916da 100644 --- a/libs/labelbox/tests/integration/conftest.py +++ b/libs/labelbox/tests/integration/conftest.py @@ -112,14 +112,11 @@ def configured_project( @pytest.fixture -def configured_project_with_complex_ontology( - client, initial_dataset, rand_gen, image_url -): - project = client.create_project( - name=rand_gen(str), - queue_mode=QueueMode.Batch, - media_type=MediaType.Image, - ) +def configured_project_with_complex_ontology(client, initial_dataset, rand_gen, + image_url, teardown_helpers): + project = client.create_project(name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image) dataset = initial_dataset data_row = dataset.create_data_row(row_data=image_url) data_row_ids = [data_row.uid] @@ -178,7 +175,7 @@ def configured_project_with_complex_ontology( project.setup(editor, ontology.asdict()) yield [project, data_row] - project.delete() + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) @pytest.fixture diff --git a/libs/labelbox/tests/integration/test_feature_schema.py b/libs/labelbox/tests/integration/test_feature_schema.py index d593ba9d6..61532ed37 100644 --- a/libs/labelbox/tests/integration/test_feature_schema.py +++ b/libs/labelbox/tests/integration/test_feature_schema.py @@ -58,9 +58,8 @@ def 
test_throws_an_error_if_feature_schema_to_delete_doesnt_exist(client): client.delete_unused_feature_schema("doesntexist") -def test_updates_a_feature_schema_title(client): - tool = client.upsert_feature_schema(point.asdict()) - feature_schema_id = tool.normalized["featureSchemaId"] +def test_updates_a_feature_schema_title(client, feature_schema): + feature_schema_id = feature_schema.normalized['featureSchemaId'] new_title = "new title" updated_feature_schema = client.update_feature_schema_title( feature_schema_id, new_title @@ -68,20 +67,15 @@ def test_updates_a_feature_schema_title(client): assert updated_feature_schema.normalized["name"] == new_title - client.delete_unused_feature_schema(feature_schema_id) - def test_throws_an_error_when_updating_a_feature_schema_with_empty_title( - client, -): - tool = client.upsert_feature_schema(point.asdict()) - feature_schema_id = tool.normalized["featureSchemaId"] + client, feature_schema): + tool = feature_schema + feature_schema_id = tool.normalized['featureSchemaId'] with pytest.raises(Exception): client.update_feature_schema_title(feature_schema_id, "") - client.delete_unused_feature_schema(feature_schema_id) - def test_throws_an_error_when_updating_not_existing_feature_schema(client): with pytest.raises(Exception): @@ -107,15 +101,9 @@ def test_updates_a_feature_schema(client, feature_schema): assert updated_feature_schema.normalized["name"] == "new name" -<<<<<<< HEAD -def test_does_not_include_used_feature_schema(client): - tool = client.upsert_feature_schema(point.asdict()) - feature_schema_id = tool.normalized["featureSchemaId"] -======= def test_does_not_include_used_feature_schema(client, feature_schema): tool = feature_schema feature_schema_id = tool.normalized['featureSchemaId'] ->>>>>>> b54a37e7 (Followup update for the feature schema test) ontology = client.create_ontology_from_feature_schemas( name="ontology name", feature_schema_ids=[feature_schema_id], From 46e6f82a8dec38b13e71be51f3a566eda2f37705 Mon Sep 17 00:00:00 2001 From: Val Brodsky Date: Mon, 16 Sep 2024 13:11:22 -0700 Subject: [PATCH 3/3] Rye fmt fixes --- .../labelbox/schema/bulk_import_request.py | 8 +- .../schema/labeling_service_dashboard.py | 38 +++-- libs/labelbox/tests/conftest.py | 132 +++++++++++------- .../tests/data/annotation_import/conftest.py | 45 ++++-- .../data/annotation_import/test_model_run.py | 5 +- libs/labelbox/tests/data/export/conftest.py | 19 +-- libs/labelbox/tests/integration/conftest.py | 13 +- .../tests/integration/test_feature_schema.py | 9 +- .../unit/test_labeling_service_dashboard.py | 102 +++++++------- 9 files changed, 219 insertions(+), 152 deletions(-) diff --git a/libs/labelbox/src/labelbox/schema/bulk_import_request.py b/libs/labelbox/src/labelbox/schema/bulk_import_request.py index 44ac7cd6a..8e11f3261 100644 --- a/libs/labelbox/src/labelbox/schema/bulk_import_request.py +++ b/libs/labelbox/src/labelbox/schema/bulk_import_request.py @@ -787,9 +787,7 @@ def validate_feature_schemas( # A union with custom construction logic to improve error messages class NDClassification( SpecialUnion, - Type[ # type: ignore - Union[NDText, NDRadio, NDChecklist] - ], + Type[Union[NDText, NDRadio, NDChecklist]], # type: ignore ): ... 
@@ -979,9 +977,7 @@ class NDTool( class NDAnnotation( SpecialUnion, - Type[ # type: ignore - Union[NDTool, NDClassification] - ], + Type[Union[NDTool, NDClassification]], # type: ignore ): @classmethod def build(cls: Any, data) -> "NDBase": diff --git a/libs/labelbox/src/labelbox/schema/labeling_service_dashboard.py b/libs/labelbox/src/labelbox/schema/labeling_service_dashboard.py index 2052897f6..c5e1fa11e 100644 --- a/libs/labelbox/src/labelbox/schema/labeling_service_dashboard.py +++ b/libs/labelbox/src/labelbox/schema/labeling_service_dashboard.py @@ -84,7 +84,8 @@ def __init__(self, **kwargs): super().__init__(**kwargs) if not self.client.enable_experimental: raise RuntimeError( - "Please enable experimental in client to use LabelingService") + "Please enable experimental in client to use LabelingService" + ) @property def service_type(self): @@ -97,20 +98,28 @@ def service_type(self): if self.editor_task_type is None: return sentence_case(self.media_type.value) - if (self.editor_task_type == EditorTaskType.OfflineModelChatEvaluation - and self.media_type == MediaType.Conversational): + if ( + self.editor_task_type == EditorTaskType.OfflineModelChatEvaluation + and self.media_type == MediaType.Conversational + ): return "Offline chat evaluation" - if (self.editor_task_type == EditorTaskType.ModelChatEvaluation and - self.media_type == MediaType.Conversational): + if ( + self.editor_task_type == EditorTaskType.ModelChatEvaluation + and self.media_type == MediaType.Conversational + ): return "Live chat evaluation" - if (self.editor_task_type == EditorTaskType.ResponseCreation and - self.media_type == MediaType.Text): + if ( + self.editor_task_type == EditorTaskType.ResponseCreation + and self.media_type == MediaType.Text + ): return "Response creation" - if (self.media_type == MediaType.LLMPromptCreation or - self.media_type == MediaType.LLMPromptResponseCreation): + if ( + self.media_type == MediaType.LLMPromptCreation + or self.media_type == MediaType.LLMPromptResponseCreation + ): return "Prompt response creation" return sentence_case(self.media_type.value) @@ -154,7 +163,8 @@ def get_all( pageInfo { endCursor } } } - """) + """ + ) else: template = Template( """query SearchProjectsPyApi($$first: Int, $$from: String) { @@ -164,11 +174,13 @@ def get_all( pageInfo { endCursor } } } - """) + """ + ) query_str = template.substitute( labeling_dashboard_selections=GRAPHQL_QUERY_SELECTIONS, search_query=build_search_filter(search_query) - if search_query else None, + if search_query + else None, ) params: Dict[str, Union[str, int]] = {} @@ -186,7 +198,7 @@ def convert_to_labeling_service_dashboard(client, data): experimental=True, ) - @model_validator(mode='before') + @model_validator(mode="before") def convert_boost_data(cls, data): if "boostStatus" in data: data["status"] = LabelingServiceStatus(data.pop("boostStatus")) diff --git a/libs/labelbox/tests/conftest.py b/libs/labelbox/tests/conftest.py index 7968e93e4..6d13a8d83 100644 --- a/libs/labelbox/tests/conftest.py +++ b/libs/labelbox/tests/conftest.py @@ -627,17 +627,25 @@ def organization(client): @pytest.fixture -def configured_project_with_label(client, rand_gen, dataset, data_row, - wait_for_label_processing, teardown_helpers): +def configured_project_with_label( + client, + rand_gen, + dataset, + data_row, + wait_for_label_processing, + teardown_helpers, +): """Project with a connected dataset, having one datarow Project contains an ontology with 1 bbox tool Additionally includes a create_label method for any needed extra labels 
One label is already created and yielded when using fixture """ - project = client.create_project(name=rand_gen(str), - queue_mode=QueueMode.Batch, - media_type=MediaType.Image) + project = client.create_project( + name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image, + ) project._wait_until_data_rows_are_processed( data_row_ids=[data_row.uid], wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS, @@ -723,17 +731,24 @@ def big_dataset(dataset: Dataset): @pytest.fixture -def configured_batch_project_with_label(client, dataset, data_row, - wait_for_label_processing, rand_gen, - teardown_helpers): +def configured_batch_project_with_label( + client, + dataset, + data_row, + wait_for_label_processing, + rand_gen, + teardown_helpers, +): """Project with a batch having one datarow Project contains an ontology with 1 bbox tool Additionally includes a create_label method for any needed extra labels One label is already created and yielded when using fixture """ - project = client.create_project(name=rand_gen(str), - queue_mode=QueueMode.Batch, - media_type=MediaType.Image) + project = client.create_project( + name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image, + ) data_rows = [dr.uid for dr in list(dataset.data_rows())] project._wait_until_data_rows_are_processed( data_row_ids=data_rows, sleep_interval=3 @@ -752,16 +767,23 @@ def configured_batch_project_with_label(client, dataset, data_row, @pytest.fixture -def configured_batch_project_with_multiple_datarows(client, dataset, data_rows, - wait_for_label_processing, - rand_gen, teardown_helpers): +def configured_batch_project_with_multiple_datarows( + client, + dataset, + data_rows, + wait_for_label_processing, + rand_gen, + teardown_helpers, +): """Project with a batch having multiple datarows Project contains an ontology with 1 bbox tool Additionally includes a create_label method for any needed extra labels """ - project = client.create_project(name=rand_gen(str), - queue_mode=QueueMode.Batch, - media_type=MediaType.Image) + project = client.create_project( + name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image, + ) global_keys = [dr.global_key for dr in data_rows] batch_name = f"batch {uuid.uuid4()}" @@ -1035,11 +1057,14 @@ def project_with_empty_ontology(project): @pytest.fixture -def configured_project_with_complex_ontology(client, initial_dataset, rand_gen, - image_url, teardown_helpers): - project = client.create_project(name=rand_gen(str), - queue_mode=QueueMode.Batch, - media_type=MediaType.Image) +def configured_project_with_complex_ontology( + client, initial_dataset, rand_gen, image_url, teardown_helpers +): + project = client.create_project( + name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image, + ) dataset = initial_dataset data_row = dataset.create_data_row(row_data=image_url) data_row_ids = [data_row.uid] @@ -1117,13 +1142,14 @@ def valid_model_id(): @pytest.fixture -def requested_labeling_service(rand_gen, client, chat_evaluation_ontology, - model_config, teardown_helpers): +def requested_labeling_service( + rand_gen, client, chat_evaluation_ontology, model_config, teardown_helpers +): project_name = f"test-model-evaluation-project-{rand_gen(str)}" dataset_name = f"test-model-evaluation-dataset-{rand_gen(str)}" - project = client.create_model_evaluation_project(name=project_name, - dataset_name=dataset_name, - data_row_count=1) + project = client.create_model_evaluation_project( + name=project_name, 
dataset_name=dataset_name, data_row_count=1
+    )
     project.connect_ontology(chat_evaluation_ontology)
 
     project.upsert_instructions("tests/integration/media/sample_pdf.pdf")
@@ -1140,7 +1166,6 @@ def requested_labeling_service(
 
 
 class TearDownHelpers:
-
     @staticmethod
     def teardown_project_labels_ontology_feature_schemas(project: Project):
         """
@@ -1156,10 +1181,13 @@ def teardown_project_labels_ontology_feature_schemas(project: Project):
             for feature in ontology.normalized["classifications"]
         ]
         tool_feature_schema_ids = [
-            feature["featureSchemaId"] for feature in ontology.normalized["tools"]
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["tools"]
         ]
 
-        feature_schema_ids = classification_feature_schema_ids + tool_feature_schema_ids
+        feature_schema_ids = (
+            classification_feature_schema_ids + tool_feature_schema_ids
+        )
         labels = list(project.labels())
         for label in labels:
             label.delete()
@@ -1171,7 +1199,8 @@ def teardown_project_labels_ontology_feature_schemas(project: Project):
                 project.client.delete_unused_feature_schema(feature_schema_id)
             except LabelboxError as e:
                 print(
-                    f"Failed to delete feature schema {feature_schema_id}: {e}")
+                    f"Failed to delete feature schema {feature_schema_id}: {e}"
+                )
 
     @staticmethod
     def teardown_ontology_feature_schemas(ontology: Ontology):
@@ -1191,20 +1220,27 @@ def teardown_ontology_feature_schemas(ontology: Ontology):
             for option in feature.get("options", [])
         ]
 
-        tool_feature_schema_ids = [
-            feature["featureSchemaId"] for feature in ontology.normalized["tools"]
-        ] + [
-            classification["featureSchemaId"]
-            for tool in ontology.normalized["tools"]
-            for classification in tool.get("classifications", [])
-        ] + [
-            option["featureSchemaId"]
-            for tool in ontology.normalized["tools"]
-            for classification in tool.get("classifications", [])
-            for option in classification.get("options", [])
-        ]
+        tool_feature_schema_ids = (
+            [
+                feature["featureSchemaId"]
+                for feature in ontology.normalized["tools"]
+            ]
+            + [
+                classification["featureSchemaId"]
+                for tool in ontology.normalized["tools"]
+                for classification in tool.get("classifications", [])
+            ]
+            + [
+                option["featureSchemaId"]
+                for tool in ontology.normalized["tools"]
+                for classification in tool.get("classifications", [])
+                for option in classification.get("options", [])
+            ]
+        )
 
-        feature_schema_ids = classification_feature_schema_ids + tool_feature_schema_ids
+        feature_schema_ids = (
+            classification_feature_schema_ids + tool_feature_schema_ids
+        )
 
         client.delete_unused_ontology(ontology_id)
         for feature_schema_id in feature_schema_ids:
             try:
                 client.delete_unused_feature_schema(feature_schema_id)
             except LabelboxError as e:
                 print(
-                    f"Failed to delete feature schema {feature_schema_id}: {e}")
+                    f"Failed to delete feature schema {feature_schema_id}: {e}"
+                )
 
 
-class ModuleTearDownHelpers(TearDownHelpers):
-    ...
+class ModuleTearDownHelpers(TearDownHelpers): ...
@pytest.fixture @@ -1224,6 +1260,6 @@ def teardown_helpers(): return TearDownHelpers() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def module_teardown_helpers(): return TearDownHelpers() diff --git a/libs/labelbox/tests/data/annotation_import/conftest.py b/libs/labelbox/tests/data/annotation_import/conftest.py index 4b88a5499..6543f54bf 100644 --- a/libs/labelbox/tests/data/annotation_import/conftest.py +++ b/libs/labelbox/tests/data/annotation_import/conftest.py @@ -13,6 +13,7 @@ from typing import Tuple, Type from labelbox.schema.annotation_import import LabelImport, AnnotationImportState from pytest import FixtureRequest + """ The main fixtures of this library are configured_project and configured_project_by_global_key. Both fixtures generate data rows with a parametrize media type. They create the amount of data rows equal to the DATA_ROW_COUNT variable below. The data rows are generated with a factory fixture that returns a function that allows you to pass a global key. The ontologies are generated normalized and based on the MediaType given (i.e. only features supported by MediaType are created). This ontology is later used to obtain the correct annotations with the prediction_id_mapping and corresponding inferences. Each data row will have all possible annotations attached supported for the MediaType. """ @@ -740,12 +741,16 @@ def _create_project( @pytest.fixture -def configured_project(client: Client, rand_gen, data_row_json_by_media_type, - request: FixtureRequest, - normalized_ontology_by_media_type, - export_v2_test_helpers, - llm_prompt_response_creation_dataset_with_data_row, - teardown_helpers): +def configured_project( + client: Client, + rand_gen, + data_row_json_by_media_type, + request: FixtureRequest, + normalized_ontology_by_media_type, + export_v2_test_helpers, + llm_prompt_response_creation_dataset_with_data_row, + teardown_helpers, +): """Configure project for test. Request.param will contain the media type if not present will use Image MediaType. The project will have 10 data rows.""" media_type = getattr(request, "param", MediaType.Image) @@ -789,11 +794,15 @@ def configured_project(client: Client, rand_gen, data_row_json_by_media_type, @pytest.fixture() -def configured_project_by_global_key(client: Client, rand_gen, - data_row_json_by_media_type, - request: FixtureRequest, - normalized_ontology_by_media_type, - export_v2_test_helpers, teardown_helpers): +def configured_project_by_global_key( + client: Client, + rand_gen, + data_row_json_by_media_type, + request: FixtureRequest, + normalized_ontology_by_media_type, + export_v2_test_helpers, + teardown_helpers, +): """Does the same thing as configured project but with global keys focus.""" media_type = getattr(request, "param", MediaType.Image) @@ -836,9 +845,14 @@ def configured_project_by_global_key(client: Client, rand_gen, @pytest.fixture(scope="module") -def module_project(client: Client, rand_gen, data_row_json_by_media_type, - request: FixtureRequest, normalized_ontology_by_media_type, - module_teardown_helpers): +def module_project( + client: Client, + rand_gen, + data_row_json_by_media_type, + request: FixtureRequest, + normalized_ontology_by_media_type, + module_teardown_helpers, +): """Generates a image project that scopes to the test module(file). 
Used to reduce api calls.""" media_type = getattr(request, "param", MediaType.Image) @@ -872,7 +886,8 @@ def module_project(client: Client, rand_gen, data_row_json_by_media_type, yield project module_teardown_helpers.teardown_project_labels_ontology_feature_schemas( - project) + project + ) if dataset: dataset.delete() diff --git a/libs/labelbox/tests/data/annotation_import/test_model_run.py b/libs/labelbox/tests/data/annotation_import/test_model_run.py index 54eb94d27..1174115c5 100644 --- a/libs/labelbox/tests/data/annotation_import/test_model_run.py +++ b/libs/labelbox/tests/data/annotation_import/test_model_run.py @@ -18,8 +18,9 @@ def current_model(client, configured_project_with_label, rand_gen): model.delete() -def test_model_run(client, configured_project_with_label, current_model, - data_row, rand_gen): +def test_model_run( + client, configured_project_with_label, current_model, data_row, rand_gen +): _, _, _, label = configured_project_with_label label_id = label.uid model = current_model diff --git a/libs/labelbox/tests/data/export/conftest.py b/libs/labelbox/tests/data/export/conftest.py index e527e7e94..0a62f39c8 100644 --- a/libs/labelbox/tests/data/export/conftest.py +++ b/libs/labelbox/tests/data/export/conftest.py @@ -2,7 +2,6 @@ import time import pytest from labelbox.schema.queue_mode import QueueMode -from labelbox.schema.media_type import MediaType from labelbox.schema.labeling_frontend import LabelingFrontend from labelbox.schema.annotation_import import LabelImport, AnnotationImportState @@ -241,8 +240,9 @@ def polygon_inference(prediction_id_mapping): @pytest.fixture -def configured_project_with_ontology(client, initial_dataset, ontology, - rand_gen, image_url, teardown_helpers): +def configured_project_with_ontology( + client, initial_dataset, ontology, rand_gen, image_url, teardown_helpers +): dataset = initial_dataset project = client.create_project( name=rand_gen(str), @@ -267,11 +267,14 @@ def configured_project_with_ontology(client, initial_dataset, ontology, @pytest.fixture -def configured_project_without_data_rows(client, ontology, rand_gen, - teardown_helpers): - project = client.create_project(name=rand_gen(str), - description=rand_gen(str), - queue_mode=QueueMode.Batch) +def configured_project_without_data_rows( + client, ontology, rand_gen, teardown_helpers +): + project = client.create_project( + name=rand_gen(str), + description=rand_gen(str), + queue_mode=QueueMode.Batch, + ) editor = list( client.get_labeling_frontends(where=LabelingFrontend.name == "editor") )[0] diff --git a/libs/labelbox/tests/integration/conftest.py b/libs/labelbox/tests/integration/conftest.py index 836b916da..c917a6164 100644 --- a/libs/labelbox/tests/integration/conftest.py +++ b/libs/labelbox/tests/integration/conftest.py @@ -112,11 +112,14 @@ def configured_project( @pytest.fixture -def configured_project_with_complex_ontology(client, initial_dataset, rand_gen, - image_url, teardown_helpers): - project = client.create_project(name=rand_gen(str), - queue_mode=QueueMode.Batch, - media_type=MediaType.Image) +def configured_project_with_complex_ontology( + client, initial_dataset, rand_gen, image_url, teardown_helpers +): + project = client.create_project( + name=rand_gen(str), + queue_mode=QueueMode.Batch, + media_type=MediaType.Image, + ) dataset = initial_dataset data_row = dataset.create_data_row(row_data=image_url) data_row_ids = [data_row.uid] diff --git a/libs/labelbox/tests/integration/test_feature_schema.py b/libs/labelbox/tests/integration/test_feature_schema.py index 
61532ed37..46ec8c067 100644 --- a/libs/labelbox/tests/integration/test_feature_schema.py +++ b/libs/labelbox/tests/integration/test_feature_schema.py @@ -59,7 +59,7 @@ def test_throws_an_error_if_feature_schema_to_delete_doesnt_exist(client): def test_updates_a_feature_schema_title(client, feature_schema): - feature_schema_id = feature_schema.normalized['featureSchemaId'] + feature_schema_id = feature_schema.normalized["featureSchemaId"] new_title = "new title" updated_feature_schema = client.update_feature_schema_title( feature_schema_id, new_title @@ -69,9 +69,10 @@ def test_updates_a_feature_schema_title(client, feature_schema): def test_throws_an_error_when_updating_a_feature_schema_with_empty_title( - client, feature_schema): + client, feature_schema +): tool = feature_schema - feature_schema_id = tool.normalized['featureSchemaId'] + feature_schema_id = tool.normalized["featureSchemaId"] with pytest.raises(Exception): client.update_feature_schema_title(feature_schema_id, "") @@ -103,7 +104,7 @@ def test_updates_a_feature_schema(client, feature_schema): def test_does_not_include_used_feature_schema(client, feature_schema): tool = feature_schema - feature_schema_id = tool.normalized['featureSchemaId'] + feature_schema_id = tool.normalized["featureSchemaId"] ontology = client.create_ontology_from_feature_schemas( name="ontology name", feature_schema_ids=[feature_schema_id], diff --git a/libs/labelbox/tests/unit/test_labeling_service_dashboard.py b/libs/labelbox/tests/unit/test_labeling_service_dashboard.py index 8ecdef2f1..061efbadf 100644 --- a/libs/labelbox/tests/unit/test_labeling_service_dashboard.py +++ b/libs/labelbox/tests/unit/test_labeling_service_dashboard.py @@ -5,23 +5,23 @@ def test_no_tasks_remaining_count(): labeling_service_dashboard_data = { - 'id': 'cm0eeo4c301lg07061phfhva0', - 'name': 'TestStatus', - 'boostRequestedAt': '2024-08-28T22:08:07.446Z', - 'boostUpdatedAt': '2024-08-28T22:08:07.446Z', - 'boostRequestedBy': None, - 'boostStatus': 'SET_UP', - 'dataRowsCount': 0, - 'dataRowsDoneCount': 0, - 'dataRowsInReviewCount': 0, - 'dataRowsInReworkCount': 0, - 'tasksTotalCount': 0, - 'tasksCompletedCount': 0, - 'tasksRemainingCount': 0, - 'mediaType': 'image', - 'editorTaskType': None, - 'tags': [], - 'client': MagicMock() + "id": "cm0eeo4c301lg07061phfhva0", + "name": "TestStatus", + "boostRequestedAt": "2024-08-28T22:08:07.446Z", + "boostUpdatedAt": "2024-08-28T22:08:07.446Z", + "boostRequestedBy": None, + "boostStatus": "SET_UP", + "dataRowsCount": 0, + "dataRowsDoneCount": 0, + "dataRowsInReviewCount": 0, + "dataRowsInReworkCount": 0, + "tasksTotalCount": 0, + "tasksCompletedCount": 0, + "tasksRemainingCount": 0, + "mediaType": "image", + "editorTaskType": None, + "tags": [], + "client": MagicMock(), } lsd = LabelingServiceDashboard(**labeling_service_dashboard_data) assert lsd.tasks_remaining_count is None @@ -29,23 +29,23 @@ def test_no_tasks_remaining_count(): def test_tasks_remaining_count_exists(): labeling_service_dashboard_data = { - 'id': 'cm0eeo4c301lg07061phfhva0', - 'name': 'TestStatus', - 'boostRequestedAt': '2024-08-28T22:08:07.446Z', - 'boostUpdatedAt': '2024-08-28T22:08:07.446Z', - 'boostRequestedBy': None, - 'boostStatus': 'SET_UP', - 'dataRowsCount': 0, - 'dataRowsDoneCount': 0, - 'dataRowsInReviewCount': 0, - 'dataRowsInReworkCount': 0, - 'tasksTotalCount': 0, - 'tasksCompletedCount': 0, - 'tasksRemainingCount': 1, - 'mediaType': 'image', - 'editorTaskType': None, - 'tags': [], - 'client': MagicMock() + "id": "cm0eeo4c301lg07061phfhva0", + "name": 
"TestStatus", + "boostRequestedAt": "2024-08-28T22:08:07.446Z", + "boostUpdatedAt": "2024-08-28T22:08:07.446Z", + "boostRequestedBy": None, + "boostStatus": "SET_UP", + "dataRowsCount": 0, + "dataRowsDoneCount": 0, + "dataRowsInReviewCount": 0, + "dataRowsInReworkCount": 0, + "tasksTotalCount": 0, + "tasksCompletedCount": 0, + "tasksRemainingCount": 1, + "mediaType": "image", + "editorTaskType": None, + "tags": [], + "client": MagicMock(), } lsd = LabelingServiceDashboard(**labeling_service_dashboard_data) assert lsd.tasks_remaining_count == 1 @@ -53,23 +53,23 @@ def test_tasks_remaining_count_exists(): def test_tasks_total_no_tasks_remaining_count(): labeling_service_dashboard_data = { - 'id': 'cm0eeo4c301lg07061phfhva0', - 'name': 'TestStatus', - 'boostRequestedAt': '2024-08-28T22:08:07.446Z', - 'boostUpdatedAt': '2024-08-28T22:08:07.446Z', - 'boostRequestedBy': None, - 'boostStatus': 'SET_UP', - 'dataRowsCount': 0, - 'dataRowsDoneCount': 0, - 'dataRowsInReviewCount': 1, - 'dataRowsInReworkCount': 0, - 'tasksTotalCount': 1, - 'tasksCompletedCount': 0, - 'tasksRemainingCount': 0, - 'mediaType': 'image', - 'editorTaskType': None, - 'tags': [], - 'client': MagicMock() + "id": "cm0eeo4c301lg07061phfhva0", + "name": "TestStatus", + "boostRequestedAt": "2024-08-28T22:08:07.446Z", + "boostUpdatedAt": "2024-08-28T22:08:07.446Z", + "boostRequestedBy": None, + "boostStatus": "SET_UP", + "dataRowsCount": 0, + "dataRowsDoneCount": 0, + "dataRowsInReviewCount": 1, + "dataRowsInReworkCount": 0, + "tasksTotalCount": 1, + "tasksCompletedCount": 0, + "tasksRemainingCount": 0, + "mediaType": "image", + "editorTaskType": None, + "tags": [], + "client": MagicMock(), } lsd = LabelingServiceDashboard(**labeling_service_dashboard_data) assert lsd.tasks_remaining_count == 0