Skip to content

Commit 1f9f3a5

Browse files
Drew Kaul
authored and committed
fixes
1 parent b7c7aaa commit 1f9f3a5

File tree

3 files changed

+20
-21
lines changed

3 files changed

+20
-21
lines changed

nucleus/constants.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,6 @@
3333
ERROR_CODES = "error_codes"
3434
ERROR_ITEMS = "upload_errors"
3535
ERROR_PAYLOAD = "error_payload"
36-
FRAMES = "frames"
3736
FRAMES_KEY = "frames"
3837
FX_KEY = "fx"
3938
FY_KEY = "fy"
@@ -80,7 +79,6 @@
8079
TYPE_KEY = "type"
8180
UPDATED_ITEMS = "updated_items"
8281
UPDATE_KEY = "update"
83-
URL = "url"
8482
URL_KEY = "url"
8583
VERTICES_KEY = "vertices"
8684
WIDTH_KEY = "width"

nucleus/dataset.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -23,13 +23,13 @@
2323
DATASET_SLICES_KEY,
2424
DEFAULT_ANNOTATION_UPDATE_MODE,
2525
EXPORTED_ROWS,
26-
FRAMES,
26+
FRAMES_KEY,
2727
NAME_KEY,
2828
REFERENCE_IDS_KEY,
2929
REQUEST_ID_KEY,
3030
SCENES_KEY,
3131
UPDATE_KEY,
32-
URL,
32+
URL_KEY,
3333
)
3434
from .dataset_item import (
3535
DatasetItem,
@@ -209,7 +209,7 @@ def append(
209209
asynchronous=False,
210210
) -> Union[dict, AsyncJob]:
211211
"""
212-
Appends scenes or images with metadata (dataset items) to the dataset. Overwrites images on collision if forced.
212+
Appends images with metadata (dataset items) or scenes to the dataset. Overwrites images on collision if forced.
213213
214214
Parameters:
215215
:param items: items to upload
@@ -224,20 +224,20 @@ def append(
224224
'ignored_items': int,
225225
}
226226
"""
227-
all_dataset_items = all(
228-
(isinstance(item, DatasetItem) for item in items)
229-
)
230-
all_scenes = all((isinstance(item, LidarScene) for item in items))
231-
if not all_dataset_items and not all_scenes:
227+
dataset_items = [
228+
item for item in items if isinstance(item, DatasetItem)
229+
]
230+
scenes = [item for item in items if isinstance(item, LidarScene)]
231+
if dataset_items and scenes:
232232
raise Exception(
233233
"You must append either DatasetItems or Scenes to the dataset."
234234
)
235-
if all_scenes:
236-
return self.append_scenes(items, update, asynchronous)
235+
if scenes:
236+
return self.append_scenes(scenes, update, asynchronous)
237237

238-
check_for_duplicate_reference_ids(items)
238+
check_for_duplicate_reference_ids(dataset_items)
239239

240-
if len(items) > WARN_FOR_LARGE_UPLOAD and not asynchronous:
240+
if len(dataset_items) > WARN_FOR_LARGE_UPLOAD and not asynchronous:
241241
print(
242242
"Tip: for large uploads, get faster performance by importing your data "
243243
"into Nucleus directly from a cloud storage provider. See "
@@ -246,9 +246,9 @@ def append(
246246
)
247247

248248
if asynchronous:
249-
check_all_paths_remote(items)
249+
check_all_paths_remote(dataset_items)
250250
request_id = serialize_and_write_to_presigned_url(
251-
items, self.id, self._client
251+
dataset_items, self.id, self._client
252252
)
253253
response = self._client.make_request(
254254
payload={REQUEST_ID_KEY: request_id, UPDATE_KEY: update},
@@ -258,7 +258,7 @@ def append(
258258

259259
return self._client.populate_dataset(
260260
self.id,
261-
items,
261+
dataset_items,
262262
update=update,
263263
batch_size=batch_size,
264264
)
@@ -309,8 +309,8 @@ def upload_scenes(
309309
"""
310310
if asynchronous:
311311
for scene in payload[SCENES_KEY]:
312-
for frame in scene[FRAMES]:
313-
check_all_frame_paths_remote(frame[URL])
312+
for frame in scene[FRAMES_KEY]:
313+
check_all_frame_paths_remote(frame[URL_KEY])
314314
request_id = serialize_and_write_to_presigned_url(
315315
[payload], self.id, self._client
316316
)

nucleus/utils.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,8 @@ def convert_export_payload(api_payload):
122122

123123

124124
def serialize_and_write(
125-
upload_units: Sequence[Union[DatasetItem, Annotation, LidarScene]], file_pointer
125+
upload_units: Sequence[Union[DatasetItem, Annotation, Dict, LidarScene]],
126+
file_pointer,
126127
):
127128
for unit in upload_units:
128129
try:
@@ -156,7 +157,7 @@ def upload_to_presigned_url(presigned_url: str, file_pointer: IO):
156157

157158

158159
def serialize_and_write_to_presigned_url(
159-
upload_units: Sequence[Union[DatasetItem, Annotation, LidarScene]],
160+
upload_units: Sequence[Union[DatasetItem, Annotation, Dict, LidarScene]],
160161
dataset_id: str,
161162
client,
162163
):

0 commit comments

Comments
 (0)