
Commit 50098fe

Author: Val Brodsky (committed)
Remove result_all error_all
1 parent 6ba61bb commit 50098fe

2 files changed: 9 additions & 33 deletions


libs/labelbox/src/labelbox/schema/task.py

Lines changed: 4 additions & 23 deletions
@@ -242,7 +242,8 @@ def __init__(self, *args, **kwargs):
     @property
     def result(self) -> Optional[List[Dict[str, Any]]]: # type: ignore
         """
-        Fetches maximum 150K results. If you need to fetch more, use `result_all` property
+        Fetches all results.
+        Note, for large uploads (>150K data rows), it could take multiple minutes to complete
         """
         if self.status == "FAILED":
             raise ValueError(f"Job failed. Errors : {self.errors}")
@@ -251,7 +252,8 @@ def result(self) -> Optional[List[Dict[str, Any]]]: # type: ignore
     @property
     def errors(self) -> Optional[List[Dict[str, Any]]]: # type: ignore
         """
-        Fetches maximum 150K errors. If you need to fetch more, use `errors_all` property
+        Fetches all errors.
+        Note, for large uploads / large number of errors (>150K), it could take multiple minutes to complete
         """
         return self._errors_as_list()

@@ -265,23 +267,6 @@ def failed_data_rows( # type: ignore
             self) -> Optional[List[Dict[str, Any]]]:
         return self.errors

-    @property
-    def result_all(self) -> PaginatedCollection:
-        """
-        This method uses our standard PaginatedCollection and allow to fetch any number of results
-        See here for more https://docs.labelbox.com/reference/sdk-fundamental-concepts-1#iterate-over-paginatedcollection
-        """
-        return self._download_results_paginated()
-
-    @property
-    def errors_all(self) -> PaginatedCollection:
-        """
-        This method uses our standard PaginatedCollection and allow to fetch any number of errors
-        See here for more https://docs.labelbox.com/reference/sdk-fundamental-concepts-1#iterate-over-paginatedcollection
-        """
-
-        return self._download_errors_paginated()
-
     def _download_results_paginated(self) -> PaginatedCollection:
         page_size = DOWNLOAD_RESULT_PAGE_SIZE
         from_cursor = None
@@ -389,8 +374,6 @@ def _results_as_list(self) -> Optional[List[Dict[str, Any]]]:
         for row in data:
             results.append(row)
             total_downloaded += 1
-            if total_downloaded >= self.MAX_DOWNLOAD_SIZE:
-                break

         if len(results) == 0:
             return None
@@ -405,8 +388,6 @@ def _errors_as_list(self) -> Optional[List[Dict[str, Any]]]:
         for row in data:
             errors.append(row)
             total_downloaded += 1
-            if total_downloaded >= self.MAX_DOWNLOAD_SIZE:
-                break

         if len(errors) == 0:
             return None
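
With this change, `Task.result` and `Task.errors` return the complete list directly, so the paginated `result_all` / `errors_all` properties are no longer needed. A minimal usage sketch of the updated API, assuming placeholder credentials, dataset ID, and image URLs (none of which come from this commit); the Client / get_dataset / create_data_rows / wait_till_done calls are the SDK's usual bulk-creation flow:

import labelbox as lb

# Placeholders: substitute a real API key and dataset ID.
client = lb.Client(api_key="<API_KEY>")
dataset = client.get_dataset("<DATASET_ID>")

task = dataset.create_data_rows([
    {"row_data": "https://example.com/image-1.jpg"},
    {"row_data": "https://example.com/image-2.jpg"},
])
task.wait_till_done()

# result/errors now fetch everything in one call; no 150K cap, no *_all fallback.
rows = task.result or []
errors = task.errors or []
print(f"created {len(rows)} data rows, {len(errors)} errors")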

libs/labelbox/tests/integration/test_data_rows.py

Lines changed: 5 additions & 10 deletions
@@ -196,9 +196,6 @@ def test_data_row_bulk_creation(dataset, rand_gen, image_url):
     assert len(results) == 2
     row_data = [result["row_data"] for result in results]
     assert row_data == [image_url, image_url]
-    results_all = task.result_all
-    row_data = [result["row_data"] for result in results_all]
-    assert row_data == [image_url, image_url]

     data_rows = list(dataset.data_rows())
     assert len(data_rows) == 2
@@ -237,7 +234,7 @@ def test_data_row_bulk_creation_from_file(dataset, local_image_file, image_url):
     assert task.status == "COMPLETE"
     assert len(task.result) == 2
     assert task.has_errors() is False
-    results = [r for r in task.result_all]
+    results = task.result
     row_data = [result["row_data"] for result in results]
     assert len(row_data) == 2

@@ -257,7 +254,7 @@ def test_data_row_bulk_creation_from_row_data_file_external_id(
     assert task.status == "COMPLETE"
     assert len(task.result) == 2
     assert task.has_errors() is False
-    results = [r for r in task.result_all]
+    results = task.result
     row_data = [result["row_data"] for result in results]
     assert len(row_data) == 2
     assert image_url in row_data
@@ -276,7 +273,7 @@ def test_data_row_bulk_creation_from_row_data_file(dataset, rand_gen,
     assert task.status == "COMPLETE"
     assert len(task.result) == 2
     assert task.has_errors() is False
-    results = [r for r in task.result_all]
+    results = task.result
     row_data = [result["row_data"] for result in results]
     assert len(row_data) == 2

@@ -980,12 +977,10 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image,
     assert task.created_data_rows[0]['external_id'] == sample_image
     assert task.created_data_rows[0]['global_key'] == global_key_1

-    errors = task.errors_all
-    all_errors = [er for er in errors]
-    assert len(all_errors) == 1
+    assert len(task.errors) == 1
     assert task.has_errors() is True

-    all_results = [result for result in task.result_all]
+    all_results = task.result
     assert len(all_results) == 1
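
For callers still using the removed properties, the migration mirrors the test updates above; a short before/after sketch, assuming a completed `task` returned by `create_data_rows`:

# Before: the *_all properties returned a PaginatedCollection that had to be iterated.
# results = [r for r in task.result_all]
# errors = [e for e in task.errors_all]

# After: result / errors are plain lists (or None when there is nothing to return).
results = task.result or []
errors = task.errors or []
row_data = [r["row_data"] for r in results]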
