
Commit d088d63

Fixed bad tests
1 parent 9dd54c7 commit d088d63

3 files changed: +12 -44 lines changed

3 files changed

+12
-44
lines changed

libs/labelbox/src/labelbox/schema/data_row_metadata.py

Lines changed: 6 additions & 18 deletions
@@ -14,6 +14,7 @@
     Type,
     Union,
     overload,
+    get_args,
 )

 from pydantic import (
@@ -27,7 +28,7 @@
 )
 from typing_extensions import Annotated

-from labelbox.schema.identifiable import GlobalKey, UniqueId
+from labelbox.schema.identifiable import GlobalKey, UniqueId, DataRowIdentifier
 from labelbox.schema.identifiables import DataRowIdentifiers, UniqueIds
 from labelbox.schema.ontology import SchemaId
 from labelbox.utils import (
@@ -87,7 +88,7 @@ class DataRowMetadata(_CamelCaseMixin):


 class DeleteDataRowMetadata(_CamelCaseMixin):
-    data_row_id: Union[str, UniqueId, GlobalKey] = None
+    data_row_id: Union[UniqueId, GlobalKey] = None
     fields: List[SchemaId]


@@ -646,21 +647,10 @@ def bulk_delete(
         >>> )
         >>> mdo.batch_delete([metadata])

-        >>> delete = DeleteDataRowMetadata(
-        >>>     data_row_id="global-key",
-        >>>     fields=[
-        >>>         "schema-id-1",
-        >>>         "schema-id-2"
-        >>>         ...
-        >>>     ]
-        >>> )
-        >>> mdo.batch_delete([metadata])

         Args:
             deletes: Data row and schema ids to delete
-                For data row, we support UniqueId, str, and GlobalKey.
-                If you pass a str, we will assume it is a UniqueId
+                For data row, we support UniqueId and GlobalKey.
                 Do not pass a mix of data row ids and global keys in the same list

         Returns:
@@ -672,10 +662,8 @@ def bulk_delete(
         if not len(deletes):
             raise ValueError("The 'deletes' list cannot be empty.")

-        for delete in enumerate(deletes):
-            if not isinstance(delete.data_row_id, UniqueId) or not isinstance(
-                delete.data_row_id, GlobalKey
-            ):
+        for delete in deletes:
+            if not isinstance(delete.data_row_id, get_args(DataRowIdentifier)):
                 raise ValueError(
                     f"Invalid data row identifier type '{type(delete.data_row_id)}' for '{delete.data_row_id}'"
                 )
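Why the bulk_delete loop needed fixing: `for delete in enumerate(deletes)` binds `delete` to `(index, item)` tuples, so `delete.data_row_id` would raise AttributeError; and even with that repaired, no object can be an instance of both UniqueId and GlobalKey, so the `or`-joined check rejected every input. The replacement leans on `typing.get_args`. A minimal sketch, assuming DataRowIdentifier is a Union[UniqueId, GlobalKey] alias (the new import suggests as much, but its definition is not shown in this diff):

    # Standalone sketch; UniqueId/GlobalKey here are stand-ins for the
    # labelbox identifier classes, and DataRowIdentifier is an assumed alias.
    from typing import Union, get_args

    class UniqueId: ...
    class GlobalKey: ...

    DataRowIdentifier = Union[UniqueId, GlobalKey]

    # get_args unpacks the Union into a tuple of classes, which isinstance
    # accepts directly:
    print(get_args(DataRowIdentifier))                            # (UniqueId, GlobalKey)
    print(isinstance(UniqueId(), get_args(DataRowIdentifier)))    # True
    print(isinstance("raw-id-str", get_args(DataRowIdentifier)))  # False

Passing the unpacked tuple straight to isinstance also keeps the runtime check in sync with the alias if more identifier types are ever added to it.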

libs/labelbox/tests/integration/test_data_row_delete_metadata.py

Lines changed: 5 additions & 25 deletions
@@ -121,14 +121,9 @@ def data_row_global_key(data_row):
     return GlobalKey(data_row.global_key)


-@pytest.fixture
-def data_row_id_as_str(data_row):
-    return data_row.uid
-
-
 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
+    ["data_row_unique_id", "data_row_global_key"],
 )
 def test_bulk_delete_datarow_metadata(
     data_row_for_delete, data_row, mdo, request
@@ -154,7 +149,7 @@ def test_bulk_delete_datarow_metadata(

 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
+    ["data_row_unique_id", "data_row_global_key"],
 )
 def test_bulk_partial_delete_datarow_metadata(
     data_row_for_delete, data_row, mdo, request
@@ -195,21 +190,6 @@ def data_row_unique_ids(big_dataset):
     return deletes


-@pytest.fixture
-def data_row_ids_as_str(big_dataset):
-    deletes = []
-    data_row_ids = [dr.uid for dr in big_dataset.data_rows()]
-
-    for data_row_id in data_row_ids:
-        deletes.append(
-            DeleteDataRowMetadata(
-                data_row_id=data_row_id,
-                fields=[SPLIT_SCHEMA_ID, CAPTURE_DT_SCHEMA_ID],
-            )
-        )
-    return deletes
-
-
 @pytest.fixture
 def data_row_global_keys(big_dataset):
     deletes = []
@@ -227,7 +207,7 @@ def data_row_global_keys(big_dataset):

 @pytest.mark.parametrize(
     "data_rows_for_delete",
-    ["data_row_ids_as_str", "data_row_unique_ids", "data_row_global_keys"],
+    ["data_row_unique_ids", "data_row_global_keys"],
 )
 def test_large_bulk_delete_datarow_metadata(
     data_rows_for_delete, big_dataset, mdo, request
@@ -267,7 +247,7 @@ def test_large_bulk_delete_datarow_metadata(

 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
+    ["data_row_unique_id", "data_row_global_key"],
 )
 def test_bulk_delete_datarow_enum_metadata(
     data_row_for_delete,
@@ -304,7 +284,7 @@ def test_bulk_delete_datarow_enum_metadata(

 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
+    ["data_row_unique_id", "data_row_global_key"],
 )
 def test_delete_non_existent_schema_id(
     data_row_for_delete, data_row, mdo, request
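With the str shortcut removed from DeleteDataRowMetadata, raw ids must now be wrapped before building deletes. A minimal usage sketch; the import paths match those shown in this commit, but the schema id is a placeholder standing in for the tests' SPLIT_SCHEMA_ID-style constants, and mdo stands for a metadata-ontology handle from the tests' fixtures:

    from labelbox.schema.data_row_metadata import DeleteDataRowMetadata
    from labelbox.schema.identifiable import GlobalKey, UniqueId

    SPLIT_SCHEMA_ID = "schema-id-1"  # placeholder; tests use seeded schema ids

    # Wrap the raw id; a bare str now fails bulk_delete's isinstance check.
    delete_by_uid = DeleteDataRowMetadata(
        data_row_id=UniqueId("data-row-uid"),
        fields=[SPLIT_SCHEMA_ID],
    )
    delete_by_key = DeleteDataRowMetadata(
        data_row_id=GlobalKey("my-global-key"),
        fields=[SPLIT_SCHEMA_ID],
    )

    # Per the updated docstring, don't mix the two kinds in one call:
    # mdo.bulk_delete([delete_by_uid])
    # mdo.bulk_delete([delete_by_key])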

libs/labelbox/tests/integration/test_data_rows.py

Lines changed: 1 addition & 1 deletion
@@ -1081,7 +1081,7 @@ def test_invalid_media_type(dataset, conversational_content):
     # TODO: What error kind should this be? It looks like for global key we are
     # using malformed query. But for invalid contents in FileUploads we use InvalidQueryError
     with pytest.raises(ResourceCreationError):
-        dataset.(
+        dataset._create_data_rows_sync(
             [{**conversational_content, "media_type": "IMAGE"}]
         )
10871087
