@@ -3,7 +3,11 @@
     ANNOTATIONS_KEY,
     DATASET_ITEM_ID_KEY,
     FRAMES_KEY,
+    IMAGE_KEY,
+    IMAGE_URL_KEY,
     ITEM_KEY,
+    POINTCLOUD_KEY,
+    POINTCLOUD_URL_KEY,
     REFERENCE_ID_KEY,
     SCENES_KEY,
     UPDATE_KEY,
@@ -12,11 +16,14 @@
 from nucleus import (
     CuboidAnnotation,
     LidarScene,
+    Frame,
 )
 
 from .helpers import (
     TEST_DATASET_3D_NAME,
     TEST_CUBOID_ANNOTATIONS,
+    TEST_DATASET_ITEMS,
+    TEST_LIDAR_ITEMS,
     TEST_LIDAR_SCENES,
     assert_cuboid_annotation_matches_dict,
 )
@@ -31,6 +38,32 @@ def dataset(CLIENT):
     assert response == {"message": "Beginning dataset deletion..."}
 
 
+def test_frame_add_item(dataset):
+    frame = Frame(index=0)
+    frame.add_item(TEST_DATASET_ITEMS[0], "camera")
+    frame.add_item(TEST_LIDAR_ITEMS[0], "lidar")
+
+    assert frame.get_index() == 0
+    assert frame.get_sensors() == ["camera", "lidar"]
+    for item in frame.get_items():
+        assert item in [TEST_DATASET_ITEMS[0], TEST_LIDAR_ITEMS[0]]
+    assert frame.get_item("lidar") == TEST_LIDAR_ITEMS[0]
+    assert frame.to_payload() == {
+        "camera": {
+            "url": TEST_DATASET_ITEMS[0].image_location,
+            "reference_id": TEST_DATASET_ITEMS[0].reference_id,
+            "type": IMAGE_KEY,
+            "metadata": TEST_DATASET_ITEMS[0].metadata or {},
+        },
+        "lidar": {
+            "url": TEST_LIDAR_ITEMS[0].pointcloud_location,
+            "reference_id": TEST_LIDAR_ITEMS[0].reference_id,
+            "type": POINTCLOUD_KEY,
+            "metadata": TEST_LIDAR_ITEMS[0].metadata or {},
+        },
+    }
+
+
 def test_scene_upload_sync(dataset):
     payload = TEST_LIDAR_SCENES
     scenes = [
@@ -44,33 +77,6 @@ def test_scene_upload_sync(dataset):
     assert response["new_scenes"] == len(scenes)
 
 
-@pytest.mark.integration
-def test_scene_upload_async(dataset):
-    payload = TEST_LIDAR_SCENES
-    scenes = [
-        LidarScene.from_json(scene_json) for scene_json in payload[SCENES_KEY]
-    ]
-    update = payload[UPDATE_KEY]
-
-    job = dataset.append(scenes, update=update, asynchronous=True)
-    job.sleep_until_complete()
-    status = job.status()
-
-    assert status == {
-        "job_id": job.job_id,
-        "status": "Completed",
-        "message": {
-            "SceneUploadResponse": {
-                "errors": [],
-                "dataset_id": dataset.id,
-                "new_scenes": len(scenes),
-                "ignored_scenes": 0,
-                "scenes_errored": 0,
-            }
-        },
-    }
-
-
 @pytest.mark.integration
 def test_scene_and_cuboid_upload_sync(dataset):
     payload = TEST_LIDAR_SCENES
@@ -103,3 +109,30 @@ def test_scene_and_cuboid_upload_sync(dataset):
     assert_cuboid_annotation_matches_dict(
         response_annotations[0], TEST_CUBOID_ANNOTATIONS[0]
     )
+
+
+@pytest.mark.integration
+def test_scene_upload_async(dataset):
+    payload = TEST_LIDAR_SCENES
+    scenes = [
+        LidarScene.from_json(scene_json) for scene_json in payload[SCENES_KEY]
+    ]
+    update = payload[UPDATE_KEY]
+
+    job = dataset.append(scenes, update=update, asynchronous=True)
+    job.sleep_until_complete()
+    status = job.status()
+
+    assert status == {
+        "job_id": job.job_id,
+        "status": "Completed",
+        "message": {
+            "SceneUploadResponse": {
+                "errors": [],
+                "dataset_id": dataset.id,
+                "new_scenes": len(scenes),
+                "ignored_scenes": 0,
+                "scenes_errored": 0,
+            }
+        },
+    }
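
Usage note: a minimal sketch of the Frame workflow that the new test_frame_add_item covers, reusing the same helper fixtures; every call here appears in the diff above.

    from nucleus import Frame
    from .helpers import TEST_DATASET_ITEMS, TEST_LIDAR_ITEMS

    # Attach one dataset item per sensor name; the payload is keyed by sensor.
    frame = Frame(index=0)
    frame.add_item(TEST_DATASET_ITEMS[0], "camera")
    frame.add_item(TEST_LIDAR_ITEMS[0], "lidar")

    # Items are retrievable per sensor, and to_payload() serializes each item
    # to a dict with url, reference_id, type, and metadata fields.
    assert frame.get_sensors() == ["camera", "lidar"]
    assert frame.get_item("lidar") == TEST_LIDAR_ITEMS[0]
    payload = frame.to_payload()  # {"camera": {...}, "lidar": {...}}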