7
7
import re
8
8
import uuid
9
9
import time
10
+ from labelbox .schema .project import Project
10
11
import requests
12
+ from labelbox .schema .ontology import Ontology
11
13
import pytest
12
14
from types import SimpleNamespace
13
15
from typing import Type
23
25
from labelbox .schema .queue_mode import QueueMode
24
26
from labelbox import Client
25
27
26
- from labelbox import Dataset , DataRow
27
28
from labelbox import LabelingFrontend
28
- from labelbox import OntologyBuilder , Tool , Option , Classification , MediaType
29
- from labelbox .orm import query
30
- from labelbox .pagination import PaginatedCollection
29
+ from labelbox import OntologyBuilder , Tool , Option , Classification
31
30
from labelbox .schema .annotation_import import LabelImport
32
- from labelbox .schema .catalog import Catalog
33
31
from labelbox .schema .enums import AnnotationImportState
34
- from labelbox .schema .invite import Invite
35
- from labelbox .schema .quality_mode import QualityMode
36
- from labelbox .schema .queue_mode import QueueMode
37
- from labelbox .schema .user import User
38
32
from labelbox .exceptions import LabelboxError
39
- from contextlib import suppress
40
- from labelbox import Client
41
33
42
34
IMG_URL = "https://picsum.photos/200/300.jpg"
43
35
MASKABLE_IMG_URL = "https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg"
@@ -635,20 +627,17 @@ def organization(client):
635
627
636
628
637
629
@pytest .fixture
638
- def configured_project_with_label (
639
- client ,
640
- rand_gen ,
641
- image_url ,
642
- project ,
643
- dataset ,
644
- data_row ,
645
- wait_for_label_processing ,
646
- ):
630
+ def configured_project_with_label (client , rand_gen , dataset , data_row ,
631
+ wait_for_label_processing , teardown_helpers ):
647
632
"""Project with a connected dataset, having one datarow
633
+
648
634
Project contains an ontology with 1 bbox tool
649
635
Additionally includes a create_label method for any needed extra labels
650
636
One label is already created and yielded when using fixture
651
637
"""
638
+ project = client .create_project (name = rand_gen (str ),
639
+ queue_mode = QueueMode .Batch ,
640
+ media_type = MediaType .Image )
652
641
project ._wait_until_data_rows_are_processed (
653
642
data_row_ids = [data_row .uid ],
654
643
wait_processing_max_seconds = DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS ,
@@ -666,8 +655,7 @@ def configured_project_with_label(
666
655
)
667
656
yield [project , dataset , data_row , label ]
668
657
669
- for label in project .labels ():
670
- label .delete ()
658
+ teardown_helpers .teardown_project_labels_ontology_feature_schemas (project )
671
659
672
660
673
661
def _create_label (project , data_row , ontology , wait_for_label_processing ):
@@ -735,14 +723,17 @@ def big_dataset(dataset: Dataset):
735
723
736
724
737
725
@pytest .fixture
738
- def configured_batch_project_with_label (
739
- project , dataset , data_row , wait_for_label_processing
740
- ):
726
+ def configured_batch_project_with_label (client , dataset , data_row ,
727
+ wait_for_label_processing , rand_gen ,
728
+ teardown_helpers ):
741
729
"""Project with a batch having one datarow
742
730
Project contains an ontology with 1 bbox tool
743
731
Additionally includes a create_label method for any needed extra labels
744
732
One label is already created and yielded when using fixture
745
733
"""
734
+ project = client .create_project (name = rand_gen (str ),
735
+ queue_mode = QueueMode .Batch ,
736
+ media_type = MediaType .Image )
746
737
data_rows = [dr .uid for dr in list (dataset .data_rows ())]
747
738
project ._wait_until_data_rows_are_processed (
748
739
data_row_ids = data_rows , sleep_interval = 3
@@ -757,18 +748,20 @@ def configured_batch_project_with_label(
757
748
758
749
yield [project , dataset , data_row , label ]
759
750
760
- for label in project .labels ():
761
- label .delete ()
751
+ teardown_helpers .teardown_project_labels_ontology_feature_schemas (project )
762
752
763
753
764
754
@pytest .fixture
765
- def configured_batch_project_with_multiple_datarows (
766
- project , dataset , data_rows , wait_for_label_processing
767
- ):
755
+ def configured_batch_project_with_multiple_datarows (client , dataset , data_rows ,
756
+ wait_for_label_processing ,
757
+ rand_gen , teardown_helpers ):
768
758
"""Project with a batch having multiple datarows
769
759
Project contains an ontology with 1 bbox tool
770
760
Additionally includes a create_label method for any needed extra labels
771
761
"""
762
+ project = client .create_project (name = rand_gen (str ),
763
+ queue_mode = QueueMode .Batch ,
764
+ media_type = MediaType .Image )
772
765
global_keys = [dr .global_key for dr in data_rows ]
773
766
774
767
batch_name = f"batch { uuid .uuid4 ()} "
@@ -780,26 +773,7 @@ def configured_batch_project_with_multiple_datarows(
780
773
781
774
yield [project , dataset , data_rows ]
782
775
783
- for label in project .labels ():
784
- label .delete ()
785
-
786
-
787
- @pytest .fixture
788
- def configured_batch_project_for_labeling_service (
789
- project , data_row_and_global_key
790
- ):
791
- """Project with a batch having multiple datarows
792
- Project contains an ontology with 1 bbox tool
793
- Additionally includes a create_label method for any needed extra labels
794
- """
795
- global_keys = [data_row_and_global_key [1 ]]
796
-
797
- batch_name = f"batch { uuid .uuid4 ()} "
798
- project .create_batch (batch_name , global_keys = global_keys )
799
-
800
- _setup_ontology (project )
801
-
802
- yield project
776
+ teardown_helpers .teardown_project_labels_ontology_feature_schemas (project )
803
777
804
778
805
779
# NOTE this is nice heuristics, also there is this logic _wait_until_data_rows_are_processed in Project
@@ -1061,14 +1035,11 @@ def project_with_empty_ontology(project):
1061
1035
1062
1036
1063
1037
@pytest .fixture
1064
- def configured_project_with_complex_ontology (
1065
- client , initial_dataset , rand_gen , image_url
1066
- ):
1067
- project = client .create_project (
1068
- name = rand_gen (str ),
1069
- queue_mode = QueueMode .Batch ,
1070
- media_type = MediaType .Image ,
1071
- )
1038
+ def configured_project_with_complex_ontology (client , initial_dataset , rand_gen ,
1039
+ image_url , teardown_helpers ):
1040
+ project = client .create_project (name = rand_gen (str ),
1041
+ queue_mode = QueueMode .Batch ,
1042
+ media_type = MediaType .Image )
1072
1043
dataset = initial_dataset
1073
1044
data_row = dataset .create_data_row (row_data = image_url )
1074
1045
data_row_ids = [data_row .uid ]
@@ -1127,7 +1098,7 @@ def configured_project_with_complex_ontology(
1127
1098
project .setup (editor , ontology .asdict ())
1128
1099
1129
1100
yield [project , data_row ]
1130
- project . delete ( )
1101
+ teardown_helpers . teardown_project_labels_ontology_feature_schemas ( project )
1131
1102
1132
1103
1133
1104
@pytest .fixture
@@ -1146,13 +1117,13 @@ def valid_model_id():
1146
1117
1147
1118
1148
1119
@pytest .fixture
1149
- def requested_labeling_service (
1150
- rand_gen ,
1151
- live_chat_evaluation_project_with_new_dataset ,
1152
- chat_evaluation_ontology ,
1153
- model_config ,
1154
- ):
1155
- project = live_chat_evaluation_project_with_new_dataset
1120
+ def requested_labeling_service (rand_gen , client , chat_evaluation_ontology ,
1121
+ model_config , teardown_helpers ):
1122
+ project_name = f"test-model-evaluation-project- { rand_gen ( str ) } "
1123
+ dataset_name = f"test-model-evaluation-dataset- { rand_gen ( str ) } "
1124
+ project = client . create_model_evaluation_project ( name = project_name ,
1125
+ dataset_name = dataset_name ,
1126
+ data_row_count = 1 )
1156
1127
project .connect_ontology (chat_evaluation_ontology )
1157
1128
1158
1129
project .upsert_instructions ("tests/integration/media/sample_pdf.pdf" )
@@ -1164,3 +1135,95 @@ def requested_labeling_service(
1164
1135
labeling_service .request ()
1165
1136
1166
1137
yield project , project .get_labeling_service ()
1138
+
1139
+ teardown_helpers .teardown_project_labels_ontology_feature_schemas (project )
1140
+
1141
+
1142
class TearDownHelpers:
    """Static helpers that release Labelbox resources in fixture teardowns."""

    @staticmethod
    def teardown_project_labels_ontology_feature_schemas(project: "Project"):
        """
        Release a project's labels, the project itself, its ontology and the
        ontology's feature schemas.

        Call this from fixture teardown. Exception handling is deliberately
        minimal: failures here only affect cleanup of test resources.

        Args:
            project: the Labelbox project to tear down.
        """
        # Capture the ontology before the project is deleted.
        ontology = project.ontology()

        # Labels belong to the project, so they are removed first.
        for label in list(project.labels()):
            label.delete()

        project.delete()

        # The ontology and its feature schemas only become deletable once no
        # project uses them, hence this runs after project.delete().
        TearDownHelpers.teardown_ontology_feature_schemas(ontology)

    @staticmethod
    def teardown_ontology_feature_schemas(ontology: "Ontology"):
        """
        Release an ontology and every feature schema it references, including
        classification options, nested tool classifications and their options.

        Call this from fixture teardown. Feature-schema deletion is
        best-effort: a schema still referenced elsewhere raises LabelboxError,
        which is logged and skipped.

        Args:
            ontology: the Labelbox ontology to tear down.
        """
        ontology_id = ontology.uid
        client = ontology.client
        normalized = ontology.normalized

        classification_feature_schema_ids = [
            feature["featureSchemaId"]
            for feature in normalized["classifications"]
        ] + [
            option["featureSchemaId"]
            for feature in normalized["classifications"]
            for option in feature.get("options", [])
        ]

        tool_feature_schema_ids = [
            feature["featureSchemaId"] for feature in normalized["tools"]
        ] + [
            classification["featureSchemaId"]
            for tool in normalized["tools"]
            for classification in tool.get("classifications", [])
        ] + [
            option["featureSchemaId"]
            for tool in normalized["tools"]
            for classification in tool.get("classifications", [])
            for option in classification.get("options", [])
        ]

        feature_schema_ids = (classification_feature_schema_ids +
                              tool_feature_schema_ids)

        client.delete_unused_ontology(ontology_id)
        for feature_schema_id in feature_schema_ids:
            try:
                # BUGFIX: this previously called `project.client.…`, but no
                # `project` name exists in this method (NameError); the
                # client comes from the ontology itself.
                client.delete_unused_feature_schema(feature_schema_id)
            except LabelboxError as e:
                print(
                    f"Failed to delete feature schema {feature_schema_id}: {e}")
1216
+
1217
+
1218
class ModuleTearDownHelpers(TearDownHelpers):
    """Module-scoped marker subclass; inherits every teardown helper unchanged."""
1220
+
1221
+
1222
@pytest.fixture
def teardown_helpers():
    """Provide resource-teardown helpers to function-scoped fixtures."""
    helpers = TearDownHelpers()
    return helpers
1225
+
1226
+
1227
@pytest.fixture(scope='module')
def module_teardown_helpers():
    """Provide resource-teardown helpers shared across a whole test module.

    Consistency fix: return the module-scoped ModuleTearDownHelpers rather
    than the base TearDownHelpers — behavior is identical (the subclass is
    empty), but the returned type now matches the fixture's declared scope.
    """
    return ModuleTearDownHelpers()
0 commit comments