
Commit 72dc0ed

Author: Val Brodsky
Commit message: PR feedback: fix constant name
1 parent: d806f11

File tree: 2 files changed (+13, -12 lines)


libs/labelbox/src/labelbox/schema/internal/data_row_uploader.py

Lines changed: 10 additions & 9 deletions
@@ -2,7 +2,7 @@
 import os
 from concurrent.futures import ThreadPoolExecutor, as_completed
 
-from typing import Iterable, List, Union
+from typing import Iterable, List
 
 from labelbox.exceptions import InvalidQueryError
 from labelbox.exceptions import InvalidAttributeError

@@ -11,7 +11,8 @@
 from labelbox.orm.model import Field
 from labelbox.schema.embedding import EmbeddingVector
 from labelbox.pydantic_compat import BaseModel
-from labelbox.schema.internal.datarow_upload_constants import MAX_DATAROW_PER_API_OPERATION
+from labelbox.schema.internal.datarow_upload_constants import (
+    MAX_DATAROW_PER_API_OPERATION, FILE_UPLOAD_THREAD_COUNT)
 from labelbox.schema.internal.data_row_upsert_item import DataRowUpsertItem
 
 
@@ -31,15 +32,15 @@ def create_descriptor_file(client,
         """
         This function is shared by `Dataset.create_data_rows`, `Dataset.create_data_rows_sync` and `Dataset.update_data_rows`.
         It is used to prepare the input file. The user defined input is validated, processed, and json stringified.
-        Finally the json data is uploaded to gcs and a uri is returned. This uri can be passed to
+        Finally the json data is uploaded to gcs and a uri is returned. This uri can be passed as a parameter to a mutation that uploads data rows
 
         Each element in `items` can be either a `str` or a `dict`. If
         it is a `str`, then it is interpreted as a local file path. The file
         is uploaded to Labelbox and a DataRow referencing it is created.
 
         If an item is a `dict`, then it could support one of the two following structures
         1. For static imagery, video, and text it should map `DataRow` field names to values.
-           At the minimum an `item` passed as a `dict` must contain a `row_data` key and value.
+           At the minimum an `items` passed as a `dict` must contain a `row_data` key and value.
            If the value for row_data is a local file path and the path exists,
            then the local file will be uploaded to labelbox.
 
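Note: the docstring above defines the accepted input shapes for `create_descriptor_file`, which is reached through `Dataset.create_data_rows` and the other methods it names. A minimal, hypothetical usage sketch; the dataset handle, file path, URL, and `global_key` value below are illustrative and not taken from this commit:

    # Each element is either a local file path (str) or a dict that must
    # contain at least a "row_data" key.
    items = [
        "/tmp/local_image.jpg",
        {
            "row_data": "https://example.com/image.png",
            "global_key": "image-1",
        },
    ]
    task = dataset.create_data_rows(items)  # dataset: a Labelbox Dataset instance
    task.wait_till_done()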
@@ -73,7 +74,7 @@ def create_descriptor_file(client,
                 a DataRow.
             ValueError: When the upload parameters are invalid
         """
-        file_upload_thread_count = 20
+        file_upload_thread_count = FILE_UPLOAD_THREAD_COUNT
         DataRow = Entity.DataRow
         AssetAttachment = Entity.AssetAttachment
 
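Note: the hardcoded thread count of 20 is replaced by `FILE_UPLOAD_THREAD_COUNT`, imported above from `datarow_upload_constants`. A sketch of what that constants module presumably contains, assuming the new constant keeps the previous value of 20; the value of `MAX_DATAROW_PER_API_OPERATION` is not visible in this diff:

    # labelbox/schema/internal/datarow_upload_constants.py -- sketch, not the actual file
    MAX_DATAROW_PER_API_OPERATION = ...  # value not shown in this diff
    FILE_UPLOAD_THREAD_COUNT = 20        # previously hardcoded in create_descriptor_file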
@@ -184,7 +185,7 @@ def validate_keys(item):
                 raise InvalidAttributeError(DataRow, invalid_keys)
             return item
 
-        def formatLegacyConversationalData(item):
+        def format_legacy_conversational_data(item):
             messages = item.pop("conversationalData")
             version = item.pop("version", 1)
             type = item.pop("type", "application/vnd.labelbox.conversational")

@@ -215,7 +216,7 @@ def convert_item(data_row_item):
                 return item
 
             if "conversationalData" in item:
-                formatLegacyConversationalData(item)
+                format_legacy_conversational_data(item)
 
             # Convert all payload variations into the same dict format
             item = format_row(item)
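Note: the renamed helper pops exactly three keys from the incoming item before the payload is normalized. A hypothetical legacy payload containing those keys; the message content itself is illustrative, not taken from this diff:

    item = {
        "conversationalData": [{"messageId": "0", "content": "Hello"}],  # illustrative messages
        "version": 1,                                        # default used by the helper
        "type": "application/vnd.labelbox.conversational",   # default used by the helper
    }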
@@ -270,9 +271,9 @@ def upload_in_chunks(client, specs: List[DataRowUpsertItem],
             for i in range(0, len(specs), upsert_chunk_size)
         ]
 
-        def _upload_chunk(_chunk):
+        def _upload_chunk(chunk):
             return DataRowUploader.create_descriptor_file(client,
-                                                          _chunk,
+                                                          chunk,
                                                           is_upsert=True)
 
         with ThreadPoolExecutor(file_upload_thread_count) as executor:
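Note: the hunk stops at the `ThreadPoolExecutor` context manager; the rest of `upload_in_chunks` is outside this diff. A sketch of the pattern the visible lines imply, submitting `_upload_chunk` per chunk and gathering the descriptor-file URIs with `as_completed` (imported at the top of the module); the `chunks` and `descriptors` names are illustrative:

    with ThreadPoolExecutor(file_upload_thread_count) as executor:
        futures = [executor.submit(_upload_chunk, chunk) for chunk in chunks]
        descriptors = [future.result() for future in as_completed(futures)]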

libs/labelbox/src/labelbox/schema/task.py

Lines changed: 3 additions & 3 deletions
@@ -233,7 +233,7 @@ class DataUpsertTask(Task):
     """
     Task class for data row upsert operations
     """
-    __max_donwload_size: Final = MAX_DATAROW_PER_API_OPERATION
+    MAX_DOWNLOAD_SIZE: Final = MAX_DATAROW_PER_API_OPERATION
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
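Note: this is the rename the commit message refers to. The misspelled double-underscore attribute `__max_donwload_size` is name-mangled by Python to `_DataUpsertTask__max_donwload_size`; replacing it with the uppercase class attribute `MAX_DOWNLOAD_SIZE` fixes the typo and uses the conventional spelling for a constant. A small standalone illustration of the mangling behaviour (not SDK code):

    class Example:
        __limit = 10   # stored as _Example__limit because of name mangling
        LIMIT = 10     # plain class constant

    assert Example.LIMIT == 10
    assert Example._Example__limit == 10   # the mangled name the old attribute required
    # Example.__limit would raise AttributeError outside the class body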
@@ -389,7 +389,7 @@ def _results_as_list(self) -> Optional[List[Dict[str, Any]]]:
             for row in data:
                 results.append(row)
                 total_downloaded += 1
-                if total_downloaded >= self.__max_donwload_size:
+                if total_downloaded >= self.MAX_DOWNLOAD_SIZE:
                     break
 
         if len(results) == 0:

@@ -405,7 +405,7 @@ def _errors_as_list(self) -> Optional[List[Dict[str, Any]]]:
             for row in data:
                 errors.append(row)
                 total_downloaded += 1
-                if total_downloaded >= self.__max_donwload_size:
+                if total_downloaded >= self.MAX_DOWNLOAD_SIZE:
                     break
 
         if len(errors) == 0:
