
Commit 9dd54c7

removed rest of deprecated
1 parent 7d82380 commit 9dd54c7

7 files changed: +9 lines, -209 lines

libs/labelbox/src/labelbox/schema/asset_attachment.py

Lines changed: 1 addition & 10 deletions
@@ -7,15 +7,6 @@
 
 
 class AttachmentType(str, Enum):
-    @classmethod
-    def __missing__(cls, value: object):
-        if str(value) == "TEXT":
-            warnings.warn(
-                "The TEXT attachment type is deprecated. Use RAW_TEXT instead."
-            )
-            return cls.RAW_TEXT
-        return value
-
     VIDEO = "VIDEO"
     IMAGE = "IMAGE"
     IMAGE_OVERLAY = "IMAGE_OVERLAY"
@@ -30,7 +21,7 @@ class AssetAttachment(DbObject):
     """Asset attachment provides extra context about an asset while labeling.
 
     Attributes:
-        attachment_type (str): IMAGE, VIDEO, IMAGE_OVERLAY, HTML, RAW_TEXT, TEXT_URL, or PDF_URL. TEXT attachment type is deprecated.
+        attachment_type (str): IMAGE, VIDEO, IMAGE_OVERLAY, HTML, RAW_TEXT, TEXT_URL, or PDF_URL.
         attachment_value (str): URL to an external file or a string of text
         attachment_name (str): The name of the attachment
     """

libs/labelbox/src/labelbox/schema/data_row_metadata.py

Lines changed: 4 additions & 20 deletions
@@ -1,5 +1,4 @@
 # type: ignore
-import warnings
 from copy import deepcopy
 from datetime import datetime
 from enum import Enum
@@ -673,29 +672,14 @@ def bulk_delete(
         if not len(deletes):
             raise ValueError("The 'deletes' list cannot be empty.")
 
-        passed_strings = False
-        for i, delete in enumerate(deletes):
-            if isinstance(delete.data_row_id, str):
-                passed_strings = True
-                deletes[i] = DeleteDataRowMetadata(
-                    data_row_id=UniqueId(delete.data_row_id),
-                    fields=delete.fields,
-                )
-            elif isinstance(delete.data_row_id, UniqueId):
-                continue
-            elif isinstance(delete.data_row_id, GlobalKey):
-                continue
-            else:
+        for delete in deletes:
+            if not isinstance(
+                delete.data_row_id, (UniqueId, GlobalKey)
+            ):
                 raise ValueError(
                     f"Invalid data row identifier type '{type(delete.data_row_id)}' for '{delete.data_row_id}'"
                 )
 
-        if passed_strings:
-            warnings.warn(
-                "Using string for data row id will be deprecated. Please use "
-                "UniqueId instead."
-            )
-
         def _batch_delete(
             deletes: List[_DeleteBatchDataRowMetadata],
         ) -> List[DataRowMetadataBatchResponse]:
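
Because bulk_delete no longer wraps bare string ids (and no longer warns), callers must construct the identifier objects explicitly. A minimal sketch, assuming mdo is a DataRowMetadataOntology handle (as in the test fixtures) and the usual import locations for the identifier types; the id and schema id values are placeholders:

from labelbox.schema.data_row_metadata import DeleteDataRowMetadata
from labelbox.schema.identifiable import GlobalKey, UniqueId

data_row_id = "<data row id>"         # placeholder
metadata_schema_id = "<schema id>"    # placeholder

deletes = [
    # Bare strings are now rejected with ValueError; wrap ids explicitly.
    DeleteDataRowMetadata(data_row_id=UniqueId(data_row_id), fields=[metadata_schema_id]),
    DeleteDataRowMetadata(data_row_id=GlobalKey("my-global-key"), fields=[metadata_schema_id]),
]
mdo.bulk_delete(deletes)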

libs/labelbox/src/labelbox/schema/dataset.py

Lines changed: 1 addition & 37 deletions
@@ -166,47 +166,13 @@ def create_data_row(self, items=None, **kwargs) -> "DataRow":
 
         return self.client.get_data_row(res[0]["id"])
 
-    def create_data_rows_sync(
-        self, items, file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT
-    ) -> None:
-        """Synchronously bulk upload data rows.
-
-        Use this instead of `Dataset.create_data_rows` for smaller batches of data rows that need to be uploaded quickly.
-        Cannot use this for uploads containing more than 1000 data rows.
-        Each data row is also limited to 5 attachments.
-
-        Args:
-            items (iterable of (dict or str)):
-                See the docstring for `Dataset._create_descriptor_file` for more information.
-        Returns:
-            None. If the function doesn't raise an exception then the import was successful.
-
-        Raises:
-            ResourceCreationError: If the `items` parameter does not conform to
-                the specification in Dataset._create_descriptor_file or if the server did not accept the
-                DataRow creation request (unknown reason).
-            InvalidAttributeError: If there are fields in `items` not valid for
-                a DataRow.
-            ValueError: When the upload parameters are invalid
-        """
-        warnings.warn(
-            "This method is deprecated and will be "
-            "removed in a future release. Please use create_data_rows instead."
-        )
-
-        self._create_data_rows_sync(
-            items, file_upload_thread_count=file_upload_thread_count
-        )
-
-        return None  # Return None if no exception is raised
-
     def _create_data_rows_sync(
         self, items, file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT
     ) -> "DataUpsertTask":
         max_data_rows_supported = 1000
         if len(items) > max_data_rows_supported:
             raise ValueError(
-                f"Dataset.create_data_rows_sync() supports a max of {max_data_rows_supported} data rows."
+                f"Dataset._create_data_rows_sync() supports a max of {max_data_rows_supported} data rows."
                 " For larger imports use the async function Dataset.create_data_rows()"
             )
         if file_upload_thread_count < 1:
@@ -235,8 +201,6 @@ def create_data_rows(
     ) -> "DataUpsertTask":
         """Asynchronously bulk upload data rows
 
-        Use this instead of `Dataset.create_data_rows_sync` uploads for batches that contain more than 1000 data rows.
-
         Args:
             items (iterable of (dict or str))
 
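Code that called the removed Dataset.create_data_rows_sync can switch to the async create_data_rows and block on the returned task, mirroring how the tests below were updated. A rough sketch, assuming dataset is an existing Dataset and the URLs are placeholders:

task = dataset.create_data_rows(
    [
        {"row_data": "https://example.com/image-1.jpg", "external_id": "row-1"},
        {"row_data": "https://example.com/image-2.jpg", "external_id": "row-2"},
    ]
)
task.wait_until_done()  # block until the DataUpsertTask finishes, like the old sync call did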
libs/labelbox/src/labelbox/schema/queue_mode.py

Lines changed: 0 additions & 6 deletions
@@ -4,9 +4,3 @@
 class QueueMode(str, Enum):
     Batch = "BATCH"
     Dataset = "DATA_SET"
-
-    @classmethod
-    def _missing_(cls, value):
-        # Parses the deprecated "CATALOG" value back to QueueMode.Batch.
-        if value == "CATALOG":
-            return QueueMode.Batch
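
With the _missing_ hook removed, the deprecated "CATALOG" string no longer resolves to QueueMode.Batch; standard Enum lookup applies. A small illustration of the new behavior:

from labelbox.schema.queue_mode import QueueMode

assert QueueMode("BATCH") is QueueMode.Batch        # still valid
assert QueueMode("DATA_SET") is QueueMode.Dataset   # still valid

try:
    QueueMode("CATALOG")   # previously coerced to Batch, now fails enum lookup
except ValueError:
    pass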

libs/labelbox/src/labelbox/schema/slice.py

Lines changed: 0 additions & 37 deletions
@@ -53,43 +53,6 @@ class CatalogSlice(Slice):
     Represents a Slice used for filtering data rows in Catalog.
     """
 
-    def get_data_row_ids(self) -> PaginatedCollection:
-        """
-        Fetches all data row ids that match this Slice
-
-        Returns:
-            A PaginatedCollection of mapping of data row ids to global keys
-        """
-
-        warnings.warn(
-            "get_data_row_ids will be deprecated. Use get_data_row_identifiers instead"
-        )
-
-        query_str = """
-            query getDataRowIdsBySavedQueryPyApi($id: ID!, $from: String, $first: Int!) {
-                getDataRowIdsBySavedQuery(input: {
-                    savedQueryId: $id,
-                    after: $from
-                    first: $first
-                }) {
-                    totalCount
-                    nodes
-                    pageInfo {
-                        endCursor
-                        hasNextPage
-                    }
-                }
-            }
-        """
-        return PaginatedCollection(
-            client=self.client,
-            query=query_str,
-            params={"id": str(self.uid)},
-            dereferencing=["getDataRowIdsBySavedQuery", "nodes"],
-            obj_class=lambda _, data_row_id: data_row_id,
-            cursor_path=["getDataRowIdsBySavedQuery", "pageInfo", "endCursor"],
-        )
-
     def get_data_row_identifiers(self) -> PaginatedCollection:
         """
         Fetches all data row ids and global keys (where defined) that match this Slice
libs/labelbox/tests/integration/test_data_rows.py

Lines changed: 3 additions & 95 deletions
@@ -500,8 +500,6 @@ def test_create_data_rows_with_metadata(mdo, dataset, image_url):
     [
         ("create_data_rows", "class"),
         ("create_data_rows", "dict"),
-        ("create_data_rows_sync", "class"),
-        ("create_data_rows_sync", "dict"),
         ("create_data_row", "class"),
         ("create_data_row", "dict"),
     ],
@@ -539,7 +537,6 @@ def create_data_row(data_rows):
 
     CREATION_FUNCTION = {
         "create_data_rows": dataset.create_data_rows,
-        "create_data_rows_sync": dataset.create_data_rows_sync,
         "create_data_row": create_data_row,
     }
     data_rows = [METADATA_FIELDS[metadata_obj_type]]
@@ -804,49 +801,6 @@ def test_data_row_attachments(dataset, image_url):
     )
 
 
-def test_create_data_rows_sync_attachments(dataset, image_url):
-    attachments = [
-        ("IMAGE", image_url, "image URL"),
-        ("RAW_TEXT", "test-text", None),
-        ("IMAGE_OVERLAY", image_url, "Overlay"),
-        ("HTML", image_url, None),
-    ]
-    attachments_per_data_row = 3
-    dataset.create_data_rows_sync(
-        [
-            {
-                "row_data": image_url,
-                "external_id": "test-id",
-                "attachments": [
-                    {
-                        "type": attachment_type,
-                        "value": attachment_value,
-                        "name": attachment_name,
-                    }
-                    for _ in range(attachments_per_data_row)
-                ],
-            }
-            for attachment_type, attachment_value, attachment_name in attachments
-        ]
-    )
-    data_rows = list(dataset.data_rows())
-    assert len(data_rows) == len(attachments)
-    for data_row in data_rows:
-        assert len(list(data_row.attachments())) == attachments_per_data_row
-
-
-def test_create_data_rows_sync_mixed_upload(dataset, image_url):
-    n_local = 100
-    n_urls = 100
-    with NamedTemporaryFile() as fp:
-        fp.write("Test data".encode())
-        fp.flush()
-        dataset.create_data_rows_sync(
-            [{DataRow.row_data: image_url}] * n_urls + [fp.name] * n_local
-        )
-    assert len(list(dataset.data_rows())) == n_local + n_urls
-
-
 def test_create_data_row_attachment(data_row):
     att = data_row.create_attachment(
         "IMAGE", "https://example.com/image.jpg", "name"
@@ -1086,53 +1040,6 @@ def test_data_row_delete_and_create_with_same_global_key(
     assert task.result[0]["global_key"] == global_key_1
 
 
-def test_data_row_bulk_creation_sync_with_unique_global_keys(
-    dataset, sample_image
-):
-    global_key_1 = str(uuid.uuid4())
-    global_key_2 = str(uuid.uuid4())
-    global_key_3 = str(uuid.uuid4())
-
-    dataset.create_data_rows_sync(
-        [
-            {DataRow.row_data: sample_image, DataRow.global_key: global_key_1},
-            {DataRow.row_data: sample_image, DataRow.global_key: global_key_2},
-            {DataRow.row_data: sample_image, DataRow.global_key: global_key_3},
-        ]
-    )
-
-    assert {row.global_key for row in dataset.data_rows()} == {
-        global_key_1,
-        global_key_2,
-        global_key_3,
-    }
-
-
-def test_data_row_bulk_creation_sync_with_same_global_keys(
-    dataset, sample_image
-):
-    global_key_1 = str(uuid.uuid4())
-
-    with pytest.raises(ResourceCreationError) as exc_info:
-        dataset.create_data_rows_sync(
-            [
-                {
-                    DataRow.row_data: sample_image,
-                    DataRow.global_key: global_key_1,
-                },
-                {
-                    DataRow.row_data: sample_image,
-                    DataRow.global_key: global_key_1,
-                },
-            ]
-        )
-
-    assert len(list(dataset.data_rows())) == 1
-    assert list(dataset.data_rows())[0].global_key == global_key_1
-    assert "Duplicate global key" in str(exc_info.value)
-    assert exc_info.value.args[1]  # task id
-
-
 @pytest.fixture
 def conversational_data_rows(dataset, conversational_content):
     examples = [
@@ -1174,7 +1081,7 @@ def test_invalid_media_type(dataset, conversational_content):
     # TODO: What error kind should this be? It looks like for global key we are
     # using malformed query. But for invalid contents in FileUploads we use InvalidQueryError
     with pytest.raises(ResourceCreationError):
-        dataset.create_data_rows_sync(
+        dataset._create_data_rows_sync(
            [{**conversational_content, "media_type": "IMAGE"}]
        )
 

@@ -1184,7 +1091,8 @@ def test_create_tiled_layer(dataset, tile_content):
         {**tile_content, "media_type": "TMS_GEO"},
         tile_content,
     ]
-    dataset.create_data_rows_sync(examples)
+    task = dataset.create_data_rows(examples)
+    task.wait_until_done()
     data_rows = list(dataset.data_rows())
     assert len(data_rows) == len(examples)
     for data_row in data_rows:

libs/labelbox/tests/unit/test_queue_mode.py

Lines changed: 0 additions & 4 deletions
@@ -3,10 +3,6 @@
 from labelbox.schema.queue_mode import QueueMode
 
 
-def test_parse_deprecated_catalog():
-    assert QueueMode("CATALOG") == QueueMode.Batch
-
-
 def test_parse_batch():
     assert QueueMode("BATCH") == QueueMode.Batch
