@@ -7,7 +7,9 @@
import re
import uuid
import time
+from labelbox.schema.project import Project
import requests
+from labelbox.schema.ontology import Ontology
import pytest
from types import SimpleNamespace
from typing import Type
@@ -23,21 +25,11 @@
from labelbox.schema.queue_mode import QueueMode
from labelbox import Client

-from labelbox import Dataset, DataRow
from labelbox import LabelingFrontend
-from labelbox import OntologyBuilder, Tool, Option, Classification, MediaType
-from labelbox.orm import query
-from labelbox.pagination import PaginatedCollection
+from labelbox import OntologyBuilder, Tool, Option, Classification
from labelbox.schema.annotation_import import LabelImport
-from labelbox.schema.catalog import Catalog
from labelbox.schema.enums import AnnotationImportState
-from labelbox.schema.invite import Invite
-from labelbox.schema.quality_mode import QualityMode
-from labelbox.schema.queue_mode import QueueMode
-from labelbox.schema.user import User
from labelbox.exceptions import LabelboxError
-from contextlib import suppress
-from labelbox import Client

IMG_URL = "https://picsum.photos/200/300.jpg"
MASKABLE_IMG_URL = "https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg"
@@ -638,17 +630,22 @@ def organization(client):
def configured_project_with_label(
    client,
    rand_gen,
-    image_url,
-    project,
    dataset,
    data_row,
    wait_for_label_processing,
+    teardown_helpers,
):
    """Project with a connected dataset, having one datarow
+
    Project contains an ontology with 1 bbox tool
    Additionally includes a create_label method for any needed extra labels
    One label is already created and yielded when using fixture
    """
+    project = client.create_project(
+        name=rand_gen(str),
+        queue_mode=QueueMode.Batch,
+        media_type=MediaType.Image,
+    )
    project._wait_until_data_rows_are_processed(
        data_row_ids=[data_row.uid],
        wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS,
@@ -666,8 +663,7 @@ def configured_project_with_label(
    )
    yield [project, dataset, data_row, label]

-    for label in project.labels():
-        label.delete()
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)


def _create_label(project, data_row, ontology, wait_for_label_processing):
@@ -736,13 +732,23 @@ def big_dataset(dataset: Dataset):

@pytest.fixture
def configured_batch_project_with_label(
-    project, dataset, data_row, wait_for_label_processing
+    client,
+    dataset,
+    data_row,
+    wait_for_label_processing,
+    rand_gen,
+    teardown_helpers,
):
    """Project with a batch having one datarow
    Project contains an ontology with 1 bbox tool
    Additionally includes a create_label method for any needed extra labels
    One label is already created and yielded when using fixture
    """
+    project = client.create_project(
+        name=rand_gen(str),
+        queue_mode=QueueMode.Batch,
+        media_type=MediaType.Image,
+    )
    data_rows = [dr.uid for dr in list(dataset.data_rows())]
    project._wait_until_data_rows_are_processed(
        data_row_ids=data_rows, sleep_interval=3
@@ -757,18 +763,27 @@ def configured_batch_project_with_label(

    yield [project, dataset, data_row, label]

-    for label in project.labels():
-        label.delete()
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)


@pytest.fixture
def configured_batch_project_with_multiple_datarows(
-    project, dataset, data_rows, wait_for_label_processing
+    client,
+    dataset,
+    data_rows,
+    wait_for_label_processing,
+    rand_gen,
+    teardown_helpers,
):
    """Project with a batch having multiple datarows
    Project contains an ontology with 1 bbox tool
    Additionally includes a create_label method for any needed extra labels
    """
+    project = client.create_project(
+        name=rand_gen(str),
+        queue_mode=QueueMode.Batch,
+        media_type=MediaType.Image,
+    )
    global_keys = [dr.global_key for dr in data_rows]

    batch_name = f"batch {uuid.uuid4()}"
@@ -780,26 +795,7 @@ def configured_batch_project_with_multiple_datarows(

    yield [project, dataset, data_rows]

-    for label in project.labels():
-        label.delete()
-
-
-@pytest.fixture
-def configured_batch_project_for_labeling_service(
-    project, data_row_and_global_key
-):
-    """Project with a batch having multiple datarows
-    Project contains an ontology with 1 bbox tool
-    Additionally includes a create_label method for any needed extra labels
-    """
-    global_keys = [data_row_and_global_key[1]]
-
-    batch_name = f"batch {uuid.uuid4()}"
-    project.create_batch(batch_name, global_keys=global_keys)
-
-    _setup_ontology(project)
-
-    yield project
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)


# NOTE this is nice heuristics, also there is this logic _wait_until_data_rows_are_processed in Project
@@ -1062,7 +1058,7 @@ def project_with_empty_ontology(project):

@pytest.fixture
def configured_project_with_complex_ontology(
-    client, initial_dataset, rand_gen, image_url
+    client, initial_dataset, rand_gen, image_url, teardown_helpers
):
    project = client.create_project(
        name=rand_gen(str),
@@ -1127,7 +1123,7 @@ def configured_project_with_complex_ontology(
    project.setup(editor, ontology.asdict())

    yield [project, data_row]
-    project.delete()
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)


@pytest.fixture
@@ -1147,12 +1143,13 @@ def valid_model_id():

@pytest.fixture
def requested_labeling_service(
-    rand_gen,
-    live_chat_evaluation_project_with_new_dataset,
-    chat_evaluation_ontology,
-    model_config,
+    rand_gen, client, chat_evaluation_ontology, model_config, teardown_helpers
):
-    project = live_chat_evaluation_project_with_new_dataset
+    project_name = f"test-model-evaluation-project-{rand_gen(str)}"
+    dataset_name = f"test-model-evaluation-dataset-{rand_gen(str)}"
+    project = client.create_model_evaluation_project(
+        name=project_name, dataset_name=dataset_name, data_row_count=1
+    )
    project.connect_ontology(chat_evaluation_ontology)

    project.upsert_instructions("tests/integration/media/sample_pdf.pdf")
@@ -1164,3 +1161,105 @@ def requested_labeling_service(

    labeling_service.request()
    yield project, project.get_labeling_service()
+
+    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)
+
+
+class TearDownHelpers:
+    @staticmethod
+    def teardown_project_labels_ontology_feature_schemas(project: Project):
+        """
+        Call this function to release project, labels, ontology and feature schemas in fixture teardown
+
+        NOTE: exception handling is not required as this is a fixture teardown
+        """
+        ontology = project.ontology()
+        ontology_id = ontology.uid
+        client = project.client
+        classification_feature_schema_ids = [
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+        ]
+        tool_feature_schema_ids = [
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["tools"]
+        ]
+
+        feature_schema_ids = (
+            classification_feature_schema_ids + tool_feature_schema_ids
+        )
+        labels = list(project.labels())
+        for label in labels:
+            label.delete()
+
+        project.delete()
+        client.delete_unused_ontology(ontology_id)
+        for feature_schema_id in feature_schema_ids:
+            try:
+                project.client.delete_unused_feature_schema(feature_schema_id)
+            except LabelboxError as e:
+                print(
+                    f"Failed to delete feature schema {feature_schema_id}: {e}"
+                )
+
+    @staticmethod
+    def teardown_ontology_feature_schemas(ontology: Ontology):
+        """
+        Call this function to release an ontology and its feature schemas in fixture teardown
+
+        NOTE: exception handling is not required as this is a fixture teardown
+        """
+        ontology_id = ontology.uid
+        client = ontology.client
+        classification_feature_schema_ids = [
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+        ] + [
+            option["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+            for option in feature.get("options", [])
+        ]
+
+        tool_feature_schema_ids = (
+            [
+                feature["featureSchemaId"]
+                for feature in ontology.normalized["tools"]
+            ]
+            + [
+                classification["featureSchemaId"]
+                for tool in ontology.normalized["tools"]
+                for classification in tool.get("classifications", [])
+            ]
+            + [
+                option["featureSchemaId"]
+                for tool in ontology.normalized["tools"]
+                for classification in tool.get("classifications", [])
+                for option in classification.get("options", [])
+            ]
+        )
+
+        feature_schema_ids = (
+            classification_feature_schema_ids + tool_feature_schema_ids
+        )
+
+        client.delete_unused_ontology(ontology_id)
+        for feature_schema_id in feature_schema_ids:
+            try:
+                # Use the client bound to the ontology; no project is in scope here.
+                client.delete_unused_feature_schema(feature_schema_id)
+            except LabelboxError as e:
+                print(
+                    f"Failed to delete feature schema {feature_schema_id}: {e}"
+                )
+
+
+class ModuleTearDownHelpers(TearDownHelpers): ...
+
+
+@pytest.fixture
+def teardown_helpers():
+    return TearDownHelpers()
+
+
+@pytest.fixture(scope="module")
+def module_teardown_helpers():
+    return TearDownHelpers()
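
For reference, a fixture elsewhere in the suite could consume these helpers with the same pattern as the fixtures above. This is an illustrative sketch only: the `sample_project` fixture name is hypothetical, and it assumes the test connects an ontology and creates labels before teardown runs, since the helper deletes the project's labels, ontology, and feature schemas.

@pytest.fixture
def sample_project(client, rand_gen, teardown_helpers):
    # Create a throwaway Batch-mode image project for a single test.
    project = client.create_project(
        name=rand_gen(str),
        queue_mode=QueueMode.Batch,
        media_type=MediaType.Image,
    )
    yield project
    # Release labels, the project, its ontology, and any unused feature schemas.
    teardown_helpers.teardown_project_labels_ontology_feature_schemas(project)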