
Commit 11490ee

Author: Val Brodsky (committed)
Fix tests
1 parent cfdba83 commit 11490ee

7 files changed (+80, -55 lines)

labelbox/data/annotation_types/data/generic_data_row_data.py

Lines changed: 2 additions & 2 deletions
@@ -11,8 +11,8 @@ class GenericDataRowData(BaseData, _NoCoercionMixin):
     url: Optional[str] = None
     class_name: Literal["GenericDataRowData"] = "GenericDataRowData"
 
-    def create_url(self, signer: Callable[[bytes], str]) -> None:
-        return None
+    def create_url(self, signer: Callable[[bytes], str]) -> Optional[str]:
+        return self.url
 
     @pydantic_compat.root_validator(pre=True)
     def validate_one_datarow_key_present(cls, data):
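For context on the change above: create_url previously ignored the signer and returned None; it now surfaces the URL already stored on the data row. A minimal standalone sketch of the new contract (an illustrative stand-in class, not the labelbox class itself):

from typing import Callable, Optional


class DataRowSketch:
    """Illustrative stand-in for GenericDataRowData."""

    def __init__(self, url: Optional[str] = None):
        self.url = url

    def create_url(self, signer: Callable[[bytes], str]) -> Optional[str]:
        # New contract: the signer is still accepted for interface
        # compatibility, but the stored URL is returned instead of None.
        return self.url


row = DataRowSketch(url="https://example.com/asset.png")
assert row.create_url(signer=lambda b: "signed") == "https://example.com/asset.png"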

labelbox/data/annotation_types/label.py

Lines changed: 1 addition & 7 deletions
@@ -53,15 +53,9 @@ class Label(pydantic_compat.BaseModel):
                                 RelationshipAnnotation]] = []
     extra: Dict[str, Any] = {}
 
-    @staticmethod
-    def is_data_type(data: Union[Dict[str, Any], DataType]) -> bool:
-        if isinstance(data, DataType):
-            return True
-        return False
-
     @pydantic_compat.root_validator(pre=True)
     def validate_data(cls, label):
-        if not Label.is_data_type(label.get("data")):
+        if isinstance(label.get("data"), Dict):
             label["data"]["class_name"] = "GenericDataRowData"
         else:
             warnings.warn(
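The validator above now branches on the raw payload type instead of the removed is_data_type helper: dict payloads are tagged as GenericDataRowData, anything else falls through to the deprecation warning. A standalone sketch of that branch (the warning text is illustrative, not the actual message in label.py):

import warnings
from typing import Any, Dict


def validate_data_sketch(label: Dict[str, Any]) -> Dict[str, Any]:
    # Mirrors the shape of Label.validate_data after this commit.
    if isinstance(label.get("data"), dict):
        label["data"]["class_name"] = "GenericDataRowData"
    else:
        warnings.warn("Typed data classes are deprecated; pass a dict payload.")
    return label


label = validate_data_sketch({"data": {"uid": "ckexamplerowid"}})
assert label["data"]["class_name"] == "GenericDataRowData"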

labelbox/schema/id_type.py

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 import sys
 
-if sys.version_info > (3, 8):
+if sys.version_info >= (3, 9):
     from strenum import StrEnum
 
     class BaseStrEnum(StrEnum):
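The subtlety behind this one-character fix: sys.version_info compares as a tuple, and (3, 8) is already exceeded by any 3.8.x patch release, so the old > (3, 8) gate did not actually exclude Python 3.8. A quick illustration of the comparison semantics:

import sys

# Any 3.8 patch release passes the old gate but fails the new one.
assert (3, 8, 10) > (3, 8)          # old check: True even on Python 3.8.10
assert not ((3, 8, 10) >= (3, 9))   # new check: False on Python 3.8.10
assert (3, 9, 0) >= (3, 9)          # new check: True from 3.9 onward

print(sys.version_info >= (3, 9))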

tests/conftest.py

Lines changed: 7 additions & 8 deletions
@@ -121,13 +121,13 @@ def rest_url(environ: str) -> str:
     return 'http://host.docker.internal:8080/api/v1'
 
 
-def testing_api_key(environ: str) -> str:
-    for var in [
-            "LABELBOX_TEST_API_KEY_PROD", "LABELBOX_TEST_API_KEY_STAGING",
-            "LABELBOX_TEST_API_KEY_CUSTOM", "LABELBOX_TEST_API_KEY_LOCAL",
-            "LABELBOX_TEST_API_KEY"
-    ]:
-        value = os.environ.get(var)
+def testing_api_key(environ: Environ) -> str:
+    keys = [
+        f"LABELBOX_TEST_API_KEY_{environ.value.upper()}",
+        "LABELBOX_TEST_API_KEY"
+    ]
+    for key in keys:
+        value = os.environ.get(key)
         if value is not None:
             return value
     raise Exception("Cannot find API to use for tests")
@@ -147,7 +147,6 @@ def __init__(self, environ: str) -> None:
         api_url = graphql_url(environ)
         api_key = testing_api_key(environ)
         rest_endpoint = rest_url(environ)
-
         super().__init__(api_key,
                          api_url,
                          enable_experimental=True,
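testing_api_key now derives the variable name from the Environ member rather than probing a fixed list of environments, and still falls back to the generic LABELBOX_TEST_API_KEY. A sketch of the lookup order, assuming an Environ enum whose values are lowercase environment names (the real enum and fixture wiring live in tests/conftest.py):

import os
from enum import Enum


class Environ(Enum):
    # Hypothetical members; the real enum is defined in tests/conftest.py.
    PROD = "prod"
    STAGING = "staging"


def testing_api_key(environ: Environ) -> str:
    keys = [
        f"LABELBOX_TEST_API_KEY_{environ.value.upper()}",
        "LABELBOX_TEST_API_KEY",
    ]
    for key in keys:
        value = os.environ.get(key)
        if value is not None:
            return value
    raise Exception("Cannot find API to use for tests")


# The environment-specific variable wins; otherwise the generic one is used.
os.environ["LABELBOX_TEST_API_KEY_STAGING"] = "fake-staging-key"
assert testing_api_key(Environ.STAGING) == "fake-staging-key"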

tests/data/annotation_import/conftest.py

Lines changed: 0 additions & 17 deletions
@@ -1922,20 +1922,3 @@ def rename_cuid_key_recursive(d):
 @pytest.fixture
 def helpers():
     return Helpers
-
-
-@pytest.fixture
-def create_data_row_for_project(project, dataset, data_row_ndjson, batch_name):
-    data_row = dataset.create_data_row(data_row_ndjson)
-
-    project.create_batch(
-        batch_name,
-        [data_row.uid],  # sample of data row objects
-        5,  # priority between 1(Highest) - 5(lowest)
-    )
-    project.data_row_ids.append(data_row.uid)
-
-    yield data_row
-
-    data_row.delete()
-    project.delete()

tests/data/annotation_import/test_data_types.py

Lines changed: 22 additions & 7 deletions
@@ -131,6 +131,19 @@ def get_annotation_comparison_dicts_from_export(export_result, data_row_id,
     return converted_annotations
 
 
+def create_data_row_for_project(project, dataset, data_row_ndjson, batch_name):
+    data_row = dataset.create_data_row(data_row_ndjson)
+
+    project.create_batch(
+        batch_name,
+        [data_row.uid],  # sample of data row objects
+        5,  # priority between 1(Highest) - 5(lowest)
+    )
+    project.data_row_ids.append(data_row.uid)
+
+    return data_row
+
+
 # TODO: Add VideoData. Currently label import job finishes without errors but project.export_labels() returns empty list.
 @pytest.mark.parametrize(
     "data_type_class",
@@ -147,10 +160,15 @@ def get_annotation_comparison_dicts_from_export(export_result, data_row_id,
         LlmResponseCreationData,
     ],
 )
-def test_import_data_types(client, configured_project, initial_dataset,
-                           rand_gen, data_row_json_by_data_type,
-                           annotations_by_data_type, data_type_class,
-                           create_data_row_for_project):
+def test_import_data_types(
+    client,
+    configured_project,
+    initial_dataset,
+    rand_gen,
+    data_row_json_by_data_type,
+    annotations_by_data_type,
+    data_type_class,
+):
     project = configured_project
     project_id = project.uid
     dataset = initial_dataset
@@ -193,7 +211,6 @@ def test_import_data_types_by_global_key(
     rand_gen,
     data_row_json_by_data_type,
     annotations_by_data_type,
-    create_data_row_for_project,
 ):
     project = configured_project
     project_id = project.uid
@@ -285,7 +302,6 @@ def test_import_data_types_v2(
     export_v2_test_helpers,
     rand_gen,
     helpers,
-    create_data_row_for_project,
 ):
     project = configured_project
     dataset = initial_dataset
@@ -355,7 +371,6 @@ def test_import_label_annotations(
     data_class,
     annotations,
     rand_gen,
-    create_data_row_for_project,
 ):
     project = configured_project_with_one_data_row
     dataset = initial_dataset
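Since create_data_row_for_project is now a plain module-level helper rather than a yielding fixture, cleanup is the caller's responsibility. A hedged sketch of how a test body might use it (the NDJSON payload and rand_gen usage are illustrative; real tests build them from the fixtures in the signatures above):

def test_example(configured_project, initial_dataset, rand_gen):
    # Hypothetical payload; real tests pull it from data_row_json_by_data_type.
    data_row_ndjson = {"row_data": "https://example.com/asset.png"}
    data_row = create_data_row_for_project(configured_project, initial_dataset,
                                           data_row_ndjson, rand_gen(str))
    try:
        ...  # run the label import and assert on the export
    finally:
        # No fixture teardown anymore, so the test deletes its own data row.
        data_row.delete()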

tests/data/annotation_import/test_generic_data_types.py

Lines changed: 47 additions & 13 deletions
@@ -65,11 +65,28 @@
 ]
 
 
-def test_import_data_types_by_global_key(client, configured_project,
-                                         initial_dataset, rand_gen,
-                                         data_row_json_by_data_type,
-                                         annotations_by_data_type,
-                                         create_data_row_for_project):
+def create_data_row_for_project(project, dataset, data_row_ndjson, batch_name):
+    data_row = dataset.create_data_row(data_row_ndjson)
+
+    project.create_batch(
+        batch_name,
+        [data_row.uid],  # sample of data row objects
+        5,  # priority between 1(Highest) - 5(lowest)
+    )
+    project.data_row_ids.append(data_row.uid)
+
+    return data_row
+
+
+def test_import_data_types_by_global_key(
+    client,
+    configured_project,
+    initial_dataset,
+    rand_gen,
+    data_row_json_by_data_type,
+    annotations_by_data_type,
+    export_v2_test_helpers,
+):
     project = configured_project
     project_id = project.uid
     dataset = initial_dataset
@@ -93,17 +110,27 @@ def test_import_data_types_by_global_key(client, configured_project,
         ) for annotations in annotations_list
     ]
 
+    def find_data_row(dr):
+        return dr['data_row']['id'] == data_row.uid
+
     label_import = lb.LabelImport.create_from_objects(client, project_id,
                                                       f"test-import-image",
                                                       labels)
     label_import.wait_until_done()
 
     assert label_import.state == AnnotationImportState.FINISHED
     assert len(label_import.errors) == 0
-    exported_labels = project.export_labels(download=True)
-    objects = exported_labels[0]["Label"]["objects"]
-    classifications = exported_labels[0]["Label"]["classifications"]
+
+    result = export_v2_test_helpers.run_project_export_v2_task(project)
+    exported_data = list(filter(find_data_row, result))[0]
+    assert exported_data
+
+    label = exported_data['projects'][project.uid]['labels'][0]
+    annotations = label['annotations']
+    objects = annotations['objects']
+    classifications = annotations['classifications']
     assert len(objects) + len(classifications) == len(labels)
+
     data_row.delete()
 
 
@@ -151,11 +178,18 @@ def set_project_media_type_from_data_type(project, data_type_class):
         LlmResponseCreationData,
     ],
 )
-def test_import_data_types_v2(client, configured_project, initial_dataset,
-                              data_row_json_by_data_type,
-                              annotations_by_data_type_v2, data_type_class,
-                              exports_v2_by_data_type, export_v2_test_helpers,
-                              rand_gen, helpers, create_data_row_for_project):
+def test_import_data_types_v2(
+    client,
+    configured_project,
+    initial_dataset,
+    data_row_json_by_data_type,
+    annotations_by_data_type_v2,
+    data_type_class,
+    exports_v2_by_data_type,
+    export_v2_test_helpers,
+    rand_gen,
+    helpers,
+):
     project = configured_project
     dataset = initial_dataset
     project_id = project.uid
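The assertion block above replaces the deprecated project.export_labels() call with the export v2 helper and then walks the per-data-row payload. A minimal sketch of that navigation over a hand-built dict in the same shape the test indexes into (keys mirror the test; values are illustrative):

exported_row = {
    "data_row": {"id": "ckexamplerowid"},
    "projects": {
        "ckexampleprojectid": {
            "labels": [{
                "annotations": {
                    "objects": [{"name": "bbox"}],
                    "classifications": [{"name": "radio_question"}],
                }
            }]
        }
    },
}

# Same traversal as the test: project entry -> first label -> annotations.
label = exported_row["projects"]["ckexampleprojectid"]["labels"][0]
annotations = label["annotations"]
assert len(annotations["objects"]) + len(annotations["classifications"]) == 2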

0 commit comments
