Skip to content

Commit ade2577

Browse files
Drew Kaul
authored and committed
refactor scene integration tests to pass with new payload
1 parent 489da47 commit ade2577

File tree

4 files changed

+46
-28
lines changed

4 files changed

+46
-28
lines changed

nucleus/constants.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,7 @@
5757
REFERENCE_IDS_KEY = "reference_ids"
5858
REFERENCE_ID_KEY = "reference_id"
5959
REQUEST_ID_KEY = "requestId"
60+
SCENES = "scenes"
6061
SEGMENTATIONS_KEY = "segmentations"
6162
SLICE_ID_KEY = "slice_id"
6263
STATUS_CODE_KEY = "status_code"
@@ -65,6 +66,7 @@
6566
TYPE_KEY = "type"
6667
UPDATED_ITEMS = "updated_items"
6768
UPDATE_KEY = "update"
69+
URL = "url"
6870
VERTICES_KEY = "vertices"
6971
WIDTH_KEY = "width"
7072
YAW_KEY = "yaw"

nucleus/dataset.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,9 @@
2828
NAME_KEY,
2929
REFERENCE_IDS_KEY,
3030
REQUEST_ID_KEY,
31+
SCENES,
3132
UPDATE_KEY,
33+
URL,
3234
)
3335
from .dataset_item import (
3436
DatasetItem,
@@ -255,22 +257,22 @@ def upload_scenes(
255257
asynchronous: bool = False,
256258
) -> Union[dict, AsyncJob]:
257259
"""
258-
Uploads scene with given frames to the dataset
260+
Uploads scenes with given frames to the dataset
259261
260262
Parameters:
261-
:param payload: dictionary containing frames to be uploaded as a scene
263+
:param payload: dictionary containing scenes to be uploaded
262264
:param update: if True, overwrite scene on collision
263265
:param aynchronous: if True, return a job object representing asynchronous ingestion job
264266
:return:
265267
{
266268
'dataset_id': str,
267-
'new_items': int,
268-
'updated_items': int,
269-
'ignored_items': int,
269+
'new_scenes': int,
270270
}
271271
"""
272272
if asynchronous:
273-
check_all_frame_paths_remote(payload[FRAMES])
273+
for scene in payload[SCENES]:
274+
for frame in scene[FRAMES]:
275+
check_all_frame_paths_remote(frame[URL])
274276
request_id = serialize_and_write_to_presigned_url(
275277
[payload], self.id, self._client
276278
)

tests/helpers.py

Lines changed: 26 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,24 @@
2626
"https://scaleapi-cust-lidar.s3.us-west-1.amazonaws.com/test-scale/frame-0.json",
2727
]
2828

29-
TEST_LIDAR_FRAMES = {"frames": TEST_POINTCLOUD_URLS}
29+
TEST_LIDAR_SCENES = {
30+
"scenes": [
31+
{
32+
"referenceId": "scene_1",
33+
"frames": [
34+
{
35+
"lidar": {
36+
"url": TEST_POINTCLOUD_URLS[0],
37+
"type": "pointcloud",
38+
"referenceId": "lidar_frame_1",
39+
"metadata": {},
40+
},
41+
},
42+
],
43+
"metadata": {},
44+
},
45+
],
46+
}
3047

3148
TEST_DATASET_ITEMS = [
3249
DatasetItem(TEST_IMG_URLS[0], "1"),
@@ -81,21 +98,21 @@ def reference_id_from_url(url):
8198

8299
TEST_CUBOID_ANNOTATIONS = [
83100
{
84-
"label": f"[Pytest] Cuboid Annotation ${i}",
101+
"label": f"[Pytest] Cuboid Annotation #{i}",
85102
"geometry": {
86103
"position": {
87-
"x": 50 * i + 10,
88-
"y": 60 * i + 10,
89-
"z": 70 * i + 10,
104+
"x": 50 * i + 5,
105+
"y": 60 * i + 5,
106+
"z": 70 * i + 5,
90107
},
91108
"dimensions": {
92-
"x": 10 * i,
93-
"y": 20 * i,
94-
"z": 30 * i,
109+
"x": 10 * i + 10,
110+
"y": 20 * i + 10,
111+
"z": 30 * i + 10,
95112
},
96113
"yaw": 5 * i,
97114
},
98-
"annotation_id": f"[Pytest] Cuboid Annotation Annotation Id{i}",
115+
"annotation_id": f"[Pytest] Cuboid Annotation Annotation Id {i}",
99116
}
100117
for i in range(len(TEST_POINTCLOUD_URLS))
101118
]

tests/test_scene.py

Lines changed: 10 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
1+
from nucleus.constants import SCENES
12
import pytest
23

34
from .helpers import (
45
TEST_DATASET_3D_NAME,
56
TEST_CUBOID_ANNOTATIONS,
6-
TEST_LIDAR_FRAMES,
7+
TEST_LIDAR_SCENES,
78
assert_cuboid_annotation_matches_dict,
89
)
910

@@ -22,34 +23,30 @@ def dataset(CLIENT):
2223

2324

2425
def test_scene_upload_sync(dataset):
25-
payload = TEST_LIDAR_FRAMES
26+
payload = TEST_LIDAR_SCENES
2627
response = dataset.upload_scenes(payload)
2728

2829
assert response["dataset_id"] == dataset.id
29-
assert response["new_items"] == 1
30-
assert response["updated_items"] == 0
31-
assert response["ignored_items"] == 0
30+
assert response["new_scenes"] == len(TEST_LIDAR_SCENES[SCENES])
3231

3332

3433
@pytest.mark.integration
3534
def test_scene_and_cuboid_upload_sync(dataset):
36-
payload = TEST_LIDAR_FRAMES
35+
payload = TEST_LIDAR_SCENES
3736
response = dataset.upload_scenes(payload)
3837

3938
assert response["dataset_id"] == dataset.id
40-
assert response["new_items"] == 1
41-
assert response["updated_items"] == 0
42-
assert response["ignored_items"] == 0
39+
assert response["new_scenes"] == len(TEST_LIDAR_SCENES[SCENES])
4340

4441
TEST_CUBOID_ANNOTATIONS[0]["dataset_item_id"] = dataset.items[0].item_id
45-
annotation = CuboidAnnotation.from_json(TEST_CUBOID_ANNOTATIONS[0])
46-
response = dataset.annotate(annotations=[annotation])
42+
annotations = [CuboidAnnotation.from_json(TEST_CUBOID_ANNOTATIONS[0])]
43+
response = dataset.annotate(annotations)
4744

4845
assert response["dataset_id"] == dataset.id
49-
assert response["annotations_processed"] == 1
46+
assert response["annotations_processed"] == len(annotations)
5047
assert response["annotations_ignored"] == 0
5148

52-
response = dataset.loc(annotation.item_id)["annotations"]["cuboid"]
49+
response = dataset.loc(annotations[0].item_id)["annotations"]["cuboid"]
5350
assert len(response) == 1
5451
response_annotation = response[0]
5552
assert_cuboid_annotation_matches_dict(

0 commit comments

Comments (0)