
Commit 4d6f90d

Revert tests
1 parent: 4fd83f6

3 files changed: +51 −12 lines

libs/labelbox/src/labelbox/schema/data_row_metadata.py

Lines changed: 26 additions & 6 deletions
@@ -1,4 +1,5 @@
 # type: ignore
+import warnings
 from copy import deepcopy
 from datetime import datetime
 from enum import Enum
@@ -14,7 +15,6 @@
     Type,
     Union,
     overload,
-    get_args,
 )
 
 from pydantic import (
@@ -28,7 +28,7 @@
 )
 from typing_extensions import Annotated
 
-from labelbox.schema.identifiable import GlobalKey, UniqueId, DataRowIdentifier
+from labelbox.schema.identifiable import GlobalKey, UniqueId
 from labelbox.schema.identifiables import DataRowIdentifiers, UniqueIds
 from labelbox.schema.ontology import SchemaId
 from labelbox.utils import (
@@ -88,7 +88,7 @@ class DataRowMetadata(_CamelCaseMixin):
 
 
 class DeleteDataRowMetadata(_CamelCaseMixin):
-    data_row_id: Union[UniqueId, GlobalKey] = None
+    data_row_id: Union[str, UniqueId, GlobalKey] = None
     fields: List[SchemaId]
 
 
@@ -647,10 +647,21 @@ def bulk_delete(
         >>> )
         >>> mdo.batch_delete([metadata])
 
+        >>> delete = DeleteDataRowMetadata(
+        >>>     data_row_id="global-key",
+        >>>     fields=[
+        >>>         "schema-id-1",
+        >>>         "schema-id-2"
+        >>>         ...
+        >>>     ]
+        >>> )
+        >>> mdo.batch_delete([metadata])
+
 
         Args:
             deletes: Data row and schema ids to delete
-                For data row, we support UniqueId and GlobalKey.
+                For data row, we support UniqueId, str, and GlobalKey.
+                If you pass a str, we will assume it is a UniqueId
                 Do not pass a mix of data row ids and global keys in the same list
 
         Returns:
@@ -662,8 +673,17 @@ def bulk_delete(
         if not len(deletes):
             raise ValueError("The 'deletes' list cannot be empty.")
 
-        for delete in deletes:
-            if not isinstance(delete.data_row_id, get_args(DataRowIdentifier)):
+        for i, delete in enumerate(deletes):
+            if isinstance(delete.data_row_id, str):
+                deletes[i] = DeleteDataRowMetadata(
+                    data_row_id=UniqueId(delete.data_row_id),
+                    fields=delete.fields,
+                )
+            elif isinstance(delete.data_row_id, UniqueId):
+                continue
+            elif isinstance(delete.data_row_id, GlobalKey):
+                continue
+            else:
                 raise ValueError(
                     f"Invalid data row identifier type '{type(delete.data_row_id)}' for '{delete.data_row_id}'"
                 )
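
Taken together, these changes let callers pass a bare data row id string, which bulk_delete rewrites to a UniqueId before validating the identifier type. A minimal usage sketch under the new signature; the API key, data row uid, global key, and schema ids below are placeholders, not values from this commit:

    from labelbox import Client
    from labelbox.schema.data_row_metadata import DeleteDataRowMetadata
    from labelbox.schema.identifiable import GlobalKey

    client = Client(api_key="<API_KEY>")  # placeholder credentials
    mdo = client.get_data_row_metadata_ontology()

    # A bare str is now accepted; bulk_delete coerces it to UniqueId(...)
    # before validating the identifier type.
    mdo.bulk_delete(
        [
            DeleteDataRowMetadata(
                data_row_id="<data-row-uid>",  # str, assumed to be a UniqueId
                fields=["<schema-id-1>", "<schema-id-2>"],
            )
        ]
    )

    # GlobalKey is still supported, but per the docstring, do not mix
    # data row ids and global keys in the same list.
    mdo.bulk_delete(
        [
            DeleteDataRowMetadata(
                data_row_id=GlobalKey("<global-key>"),
                fields=["<schema-id-1>"],
            )
        ]
    )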

libs/labelbox/tests/integration/test_data_row_delete_metadata.py

Lines changed: 25 additions & 5 deletions
@@ -121,9 +121,14 @@ def data_row_global_key(data_row):
     return GlobalKey(data_row.global_key)
 
 
+@pytest.fixture
+def data_row_id_as_str(data_row):
+    return data_row.uid
+
+
 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_unique_id", "data_row_global_key"],
+    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
 )
 def test_bulk_delete_datarow_metadata(
     data_row_for_delete, data_row, mdo, request
@@ -149,7 +154,7 @@ def test_bulk_delete_datarow_metadata(
 
 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_unique_id", "data_row_global_key"],
+    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
 )
 def test_bulk_partial_delete_datarow_metadata(
     data_row_for_delete, data_row, mdo, request
@@ -190,6 +195,21 @@ def data_row_unique_ids(big_dataset):
     return deletes
 
 
+@pytest.fixture
+def data_row_ids_as_str(big_dataset):
+    deletes = []
+    data_row_ids = [dr.uid for dr in big_dataset.data_rows()]
+
+    for data_row_id in data_row_ids:
+        deletes.append(
+            DeleteDataRowMetadata(
+                data_row_id=data_row_id,
+                fields=[SPLIT_SCHEMA_ID, CAPTURE_DT_SCHEMA_ID],
+            )
+        )
+    return deletes
+
+
 @pytest.fixture
 def data_row_global_keys(big_dataset):
     deletes = []
@@ -207,7 +227,7 @@ def data_row_global_keys(big_dataset):
 
 @pytest.mark.parametrize(
     "data_rows_for_delete",
-    ["data_row_unique_ids", "data_row_global_keys"],
+    ["data_row_ids_as_str", "data_row_unique_ids", "data_row_global_keys"],
 )
 def test_large_bulk_delete_datarow_metadata(
     data_rows_for_delete, big_dataset, mdo, request
@@ -247,7 +267,7 @@ def test_large_bulk_delete_datarow_metadata(
 
 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_unique_id", "data_row_global_key"],
+    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
 )
 def test_bulk_delete_datarow_enum_metadata(
     data_row_for_delete,
@@ -284,7 +304,7 @@ def test_bulk_delete_datarow_enum_metadata(
 
 @pytest.mark.parametrize(
     "data_row_for_delete",
-    ["data_row_unique_id", "data_row_global_key"],
+    ["data_row_id_as_str", "data_row_unique_id", "data_row_global_key"],
 )
 def test_delete_non_existent_schema_id(
     data_row_for_delete, data_row, mdo, request
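
These parametrized tests receive the fixture name as a string together with pytest's built-in request fixture, which implies the test body resolves it at runtime via request.getfixturevalue. A self-contained sketch of that indirection pattern, with illustrative fixture names rather than the repo's actual test body:

    import pytest

    @pytest.fixture
    def id_as_str():
        return "some-data-row-uid"

    @pytest.fixture
    def id_as_upper():
        return "SOME-DATA-ROW-UID"

    @pytest.mark.parametrize(
        "id_fixture",
        ["id_as_str", "id_as_upper"],
    )
    def test_id_resolution(id_fixture, request):
        # Look up the fixture named by the parametrize string at runtime,
        # so one test exercises every identifier variant.
        value = request.getfixturevalue(id_fixture)
        assert isinstance(value, str)

This pattern is why adding the new data_row_id_as_str and data_row_ids_as_str fixtures to each parametrize list is enough to cover the str code path in every existing test.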

libs/labelbox/tests/integration/test_data_rows.py

Lines changed: 0 additions & 1 deletion
@@ -1081,7 +1081,6 @@ def test_invalid_media_type(dataset, conversational_content):
     # TODO: What error kind should this be? It looks like for global key we are
     # using malformed query. But for invalid contents in FileUploads we use InvalidQueryError
     with pytest.raises(ResourceCreationError):
-        dataset.(
         dataset._create_data_rows_sync(
             [{**conversational_content, "media_type": "IMAGE"}]
         )
