
Commit bccc78c

Author: Diego Ardila
Commit message: merge master
Parents: 0cc7ec4 + 17fb35b

File tree

2 files changed: +19 -11 lines


nucleus/dataset_item.py

Lines changed: 3 additions & 8 deletions
@@ -7,7 +7,6 @@
 
 from .annotation import is_local_path, Point3D
 from .constants import (
-    DATASET_ITEM_ID_KEY,
     IMAGE_URL_KEY,
     METADATA_KEY,
     ORIGINAL_IMAGE_URL_KEY,
@@ -92,12 +91,12 @@ class DatasetItemType(Enum):
 class DatasetItem:  # pylint: disable=R0902
     image_location: Optional[str] = None
     reference_id: Optional[str] = None
-    item_id: Optional[str] = None
     metadata: Optional[dict] = None
     pointcloud_location: Optional[str] = None
     upload_to_scale: Optional[bool] = True
 
     def __post_init__(self):
+        assert self.reference_id is not None, "reference_id is required."
         assert bool(self.image_location) != bool(
             self.pointcloud_location
         ), "Must specify exactly one of the image_location, pointcloud_location parameters"
@@ -133,14 +132,12 @@ def from_json(cls, payload: dict, is_scene=False):
                 image_location=image_url,
                 pointcloud_location=payload.get(POINTCLOUD_URL_KEY, None),
                 reference_id=payload.get(REFERENCE_ID_KEY, None),
-                item_id=payload.get(DATASET_ITEM_ID_KEY, None),
                 metadata=payload.get(METADATA_KEY, {}),
             )
 
         return cls(
             image_location=image_url,
             reference_id=payload.get(REFERENCE_ID_KEY, None),
-            item_id=payload.get(DATASET_ITEM_ID_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
             upload_to_scale=payload.get(UPLOAD_TO_SCALE_KEY, None),
         )
@@ -152,10 +149,8 @@ def to_payload(self, is_scene=False) -> dict:
         payload: Dict[str, Any] = {
             METADATA_KEY: self.metadata or {},
         }
-        if self.reference_id:
-            payload[REFERENCE_ID_KEY] = self.reference_id
-        if self.item_id:
-            payload[DATASET_ITEM_ID_KEY] = self.item_id
+
+        payload[REFERENCE_ID_KEY] = self.reference_id
 
         if is_scene:
             if self.image_location:
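For orientation, here is a minimal sketch of how calling code is expected to construct a DatasetItem after this change: reference_id is now mandatory (enforced by the new assertion in __post_init__), item_id is gone, and to_payload() emits the reference ID unconditionally. The import path matches this repo; the URL, reference ID, and metadata values are illustrative placeholders, not taken from the commit.

from nucleus.dataset_item import DatasetItem

# reference_id is now required; omitting it trips the new assertion in __post_init__.
item = DatasetItem(
    image_location="https://example.com/images/0001.jpg",  # illustrative URL
    reference_id="0001.jpg",                                # caller-chosen unique ID
    metadata={"split": "train"},
)

# to_payload() now always includes the reference ID; item_id is no longer part of
# the payload at all.
payload = item.to_payload()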

tests/test_dataset.py

Lines changed: 16 additions & 3 deletions
@@ -182,8 +182,13 @@ def check_is_expected_response(response):
         # Upload just the first item in privacy mode
         upload_to_scale = i == 0
         ds_items_plain.append(
-            DatasetItem(image_location=url, upload_to_scale=upload_to_scale)
+            DatasetItem(
+                image_location=url,
+                upload_to_scale=upload_to_scale,
+                reference_id=url.split("/")[-1] + "_plain",
+            )
         )
+
     response = dataset.append(ds_items_plain)
     check_is_expected_response(response)
 
@@ -195,15 +200,23 @@ def check_is_expected_response(response):
 
 def test_dataset_append_local(CLIENT, dataset):
     ds_items_local_error = [
-        DatasetItem(image_location=LOCAL_FILENAME, metadata={"test": math.nan})
+        DatasetItem(
+            image_location=LOCAL_FILENAME,
+            metadata={"test": math.nan},
+            reference_id="bad",
+        )
     ]
     with pytest.raises(ValueError) as e:
         dataset.append(ds_items_local_error)
     assert "Out of range float values are not JSON compliant" in str(
         e.value
     )
     ds_items_local = [
-        DatasetItem(image_location=LOCAL_FILENAME, metadata={"test": 0})
+        DatasetItem(
+            image_location=LOCAL_FILENAME,
+            metadata={"test": 0},
+            reference_id=LOCAL_FILENAME.split("/")[-1],
+        )
     ]
 
     response = dataset.append(ds_items_local)
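A hypothetical companion test, not part of this commit, sketching how the new requirement could be exercised directly: constructing a DatasetItem without a reference_id should now fail fast with the assertion added in __post_init__. The test name and image path are made up for illustration.

import pytest

from nucleus.dataset_item import DatasetItem


def test_dataset_item_requires_reference_id():
    # Hypothetical test: omitting reference_id should raise the new assertion.
    with pytest.raises(AssertionError, match="reference_id is required"):
        DatasetItem(image_location="some_local_file.jpg")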
