3 files changed: +34, -10 lines
@@ -324,7 +324,7 @@ def populate_dataset(
         self,
         dataset_id: str,
         dataset_items: List[DatasetItem],
-        batch_size: int = 30,
+        batch_size: int = 20,
         update: bool = False,
     ):
         """
@@ -495,6 +495,9 @@ async def _make_files_request(
         files,
         route: str,
         session: aiohttp.ClientSession,
+        retry_attempt=0,
+        max_retries=3,
+        sleep_intervals=(1, 3, 9),
     ):
         """
         Makes an async post request with files to a Nucleus endpoint.
@@ -519,6 +522,7 @@ async def _make_files_request(
         )

         for sleep_time in RetryStrategy.sleep_times + [-1]:
+
             async with session.post(
                 endpoint,
                 data=form,
@@ -542,15 +546,27 @@ async def _make_files_request(
                     continue

                 if not response.ok:
-                    self.handle_bad_response(
-                        endpoint,
-                        session.post,
-                        aiohttp_response=(
-                            response.status,
-                            response.reason,
-                            data,
-                        ),
-                    )
+                    if retry_attempt < max_retries:
+                        time.sleep(sleep_intervals[retry_attempt])
+                        retry_attempt += 1
+                        return await self._make_files_request(
+                            files,
+                            route,
+                            session,
+                            retry_attempt,
+                            max_retries,
+                            sleep_intervals,
+                        )
+                    else:
+                        self.handle_bad_response(
+                            endpoint,
+                            session.post,
+                            aiohttp_response=(
+                                response.status,
+                                response.reason,
+                                data,
+                            ),
+                        )

         return data
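The retry path above recurses back into _make_files_request and pauses with time.sleep, which blocks the running event loop, so every other in-flight upload stalls for the full backoff. Below is a minimal sketch of the same fixed-interval backoff written as a non-blocking loop; post_with_retries and make_form are hypothetical names, not part of the Nucleus client, and the form is rebuilt each attempt because an aiohttp.FormData body cannot be re-sent once consumed.

import asyncio
from typing import Callable, Sequence

import aiohttp


async def post_with_retries(
    session: aiohttp.ClientSession,
    endpoint: str,
    make_form: Callable[[], aiohttp.FormData],
    sleep_intervals: Sequence[float] = (1, 3, 9),
) -> dict:
    """Sketch: POST a multipart form, retrying failures with fixed backoff."""
    last_status = None
    # One initial attempt, plus one retry per sleep interval.
    for attempt in range(len(sleep_intervals) + 1):
        # make_form() rebuilds the multipart body for every attempt.
        async with session.post(endpoint, data=make_form()) as response:
            if response.ok:
                return await response.json()
            last_status = response.status
        if attempt < len(sleep_intervals):
            # asyncio.sleep yields to the event loop; time.sleep would
            # freeze all concurrent uploads for the full backoff window.
            await asyncio.sleep(sleep_intervals[attempt])
    raise RuntimeError(f"Upload failed after retries (last status: {last_status})")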
@@ -261,6 +261,9 @@ def append(
             'ignored_items': int,
         }
         """
+        assert (
+            batch_size is None or batch_size < 30
+        ), "Please specify a batch size smaller than 30 to avoid timeouts."
         dataset_items = [
             item for item in items if isinstance(item, DatasetItem)
         ]
@@ -270,6 +273,9 @@ def append(
             "You must append either DatasetItems or Scenes to the dataset."
         )
         if scenes:
+            assert (
+                asynchronous
+            ), "In order to avoid timeouts, you must set asynchronous=True when uploading scenes."
             return self.append_scenes(scenes, update, asynchronous)

         check_for_duplicate_reference_ids(dataset_items)
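Together, the two assertions tighten the append contract: an explicit batch_size for dataset items must stay below 30 (and the internal default drops to 20), while scene uploads must take the asynchronous path. A hypothetical caller under these constraints, assuming dataset, items, and scenes already exist, and assuming the returned async job exposes a sleep_until_complete polling helper:

# DatasetItems: any explicit batch_size must now be below 30.
response = dataset.append(items, update=True, batch_size=20)

# Scenes: the synchronous path now asserts, so request an async job.
job = dataset.append(scenes, update=True, asynchronous=True)
job.sleep_until_complete()  # assumption: async jobs expose a polling helper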
@@ -252,6 +252,7 @@ def test_scene_add_frame():
     }


+@pytest.mark.skip("Deactivated sync upload for scenes")
 def test_scene_upload_sync(dataset):
     payload = TEST_LIDAR_SCENES
     scenes = [
@@ -272,6 +273,7 @@ def test_scene_upload_sync(dataset):
     assert response["new_scenes"] == len(scenes)


+@pytest.mark.skip("Deactivated sync upload for scenes")
 @pytest.mark.integration
 def test_scene_and_cuboid_upload_sync(dataset):
     payload = TEST_LIDAR_SCENES