@@ -274,6 +274,16 @@ def test_scene_upload_sync(dataset_scene):
274
274
assert response ["dataset_id" ] == dataset_scene .id
275
275
assert response ["new_scenes" ] == len (scenes )
276
276
277
+ uploaded_scenes = dataset_scene .scenes
278
+ assert len (uploaded_scenes ) == len (scenes )
279
+ assert all (
280
+ u ["reference_id" ] == o .reference_id
281
+ for u , o in zip (uploaded_scenes , scenes )
282
+ )
283
+ assert all (
284
+ u ["metadata" ] == o .metadata for u , o in zip (uploaded_scenes , scenes )
285
+ )
286
+
277
287
278
288
@pytest .mark .skip ("Deactivated sync upload for scenes" )
279
289
@pytest .mark .integration
@@ -289,6 +299,16 @@ def test_scene_and_cuboid_upload_sync(dataset_scene):
289
299
assert response ["dataset_id" ] == dataset_scene .id
290
300
assert response ["new_scenes" ] == len (scenes )
291
301
302
+ uploaded_scenes = dataset_scene .scenes
303
+ assert len (uploaded_scenes ) == len (scenes )
304
+ assert all (
305
+ u ["reference_id" ] == o .reference_id
306
+ for u , o in zip (uploaded_scenes , scenes )
307
+ )
308
+ assert all (
309
+ u ["metadata" ] == o .metadata for u , o in zip (uploaded_scenes , scenes )
310
+ )
311
+
292
312
lidar_item_ref = payload [SCENES_KEY ][0 ][FRAMES_KEY ][0 ]["lidar" ][
293
313
REFERENCE_ID_KEY
294
314
]
@@ -341,6 +361,16 @@ def test_scene_upload_async(dataset_scene):
341
361
"total_steps" : 1 ,
342
362
}
343
363
364
+ uploaded_scenes = dataset_scene .scenes
365
+ assert len (uploaded_scenes ) == len (scenes )
366
+ assert all (
367
+ u ["reference_id" ] == o .reference_id
368
+ for u , o in zip (uploaded_scenes , scenes )
369
+ )
370
+ assert all (
371
+ u ["metadata" ] == o .metadata for u , o in zip (uploaded_scenes , scenes )
372
+ )
373
+
344
374
345
375
@pytest .mark .skip (reason = "Temporarily skipped because failing 12/28/21" )
346
376
@pytest .mark .integration
@@ -349,7 +379,6 @@ def test_scene_upload_and_update(dataset_scene):
349
379
scenes = [
350
380
LidarScene .from_json (scene_json ) for scene_json in payload [SCENES_KEY ]
351
381
]
352
- reference_ids = [s .reference_id for s in scenes ]
353
382
update = payload [UPDATE_KEY ]
354
383
355
384
job = dataset_scene .append (scenes , update = update , asynchronous = True )
@@ -374,10 +403,15 @@ def test_scene_upload_and_update(dataset_scene):
374
403
"total_steps" : 1 ,
375
404
}
376
405
377
- fetched_scenes = [
378
- dataset_scene .get_scene (ref_id ) for ref_id in reference_ids
379
- ]
380
- assert len (fetched_scenes ) == len (scenes )
406
+ uploaded_scenes = dataset_scene .scenes
407
+ assert len (uploaded_scenes ) == len (scenes )
408
+ assert all (
409
+ u ["reference_id" ] == o .reference_id
410
+ for u , o in zip (uploaded_scenes , scenes )
411
+ )
412
+ assert all (
413
+ u ["metadata" ] == o .metadata for u , o in zip (uploaded_scenes , scenes )
414
+ )
381
415
382
416
job2 = dataset_scene .append (scenes , update = True , asynchronous = True )
383
417
job2 .sleep_until_complete ()