
Commit 1147fbe

All tests pass
1 parent d2f3bd9 commit 1147fbe

File tree

3 files changed: +140 -85 lines changed

nucleus/slice.py

Lines changed: 7 additions & 2 deletions
@@ -133,16 +133,21 @@ def check_annotations_are_in_slice(
         slice: The slice to check against.
     """
     info = slice_to_check.info()
+
     item_ids_not_found_in_slice = {
         annotation.item_id
         for annotation in annotations
         if annotation.item_id is not None
-    }.difference({item_metadata["id"] for item_metadata in info})
+    }.difference(
+        {item_metadata["id"] for item_metadata in info["dataset_items"]}
+    )
     reference_ids_not_found_in_slice = {
         annotation.reference_id
         for annotation in annotations
         if annotation.reference_id is not None
-    }.difference({item_metadata["reference_id"] for item_metadata in info})
+    }.difference(
+        {item_metadata["ref_id"] for item_metadata in info["dataset_items"]}
+    )
     if item_ids_not_found_in_slice or reference_ids_not_found_in_slice:
         annotations_are_in_slice = False
     else:
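
The change makes the membership check read item records from the "dataset_items" list in the Slice.info() payload, matching on the "id" and "ref_id" keys (the same keys the tests further down assert against). A minimal sketch of the set-difference logic, using an invented payload for illustration only:

# Illustration only; the payload values are made up, but the keys
# ("dataset_items", "id", "ref_id") match what the patched code and the
# tests expect.
info = {
    "name": "example-slice",
    "dataset_id": "ds_123",
    "dataset_items": [
        {"id": "item_1", "ref_id": "image_1"},
        {"id": "item_2", "ref_id": "image_2"},
    ],
}

annotation_reference_ids = {"image_1", "image_9"}

# Same pattern as the patched code: any annotation reference_id not present
# among the slice's dataset_items is reported as not found.
reference_ids_not_found_in_slice = annotation_reference_ids.difference(
    {item_metadata["ref_id"] for item_metadata in info["dataset_items"]}
)
assert reference_ids_not_found_in_slice == {"image_9"}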

tests/test_dataset.py

Lines changed: 0 additions & 82 deletions
@@ -127,85 +127,3 @@ def test_dataset_list_autotags(CLIENT, dataset):
     # List of Autotags should be empty
     autotag_response = CLIENT.list_autotags(dataset.id)
     assert autotag_response == []
-
-
-def test_slice_create_and_delete_and_list(dataset):
-    # Dataset upload
-    ds_items = []
-    for url in TEST_IMG_URLS:
-        ds_items.append(
-            DatasetItem(
-                image_location=url,
-                reference_id=reference_id_from_url(url),
-            )
-        )
-    response = dataset.append(ds_items)
-    assert ERROR_PAYLOAD not in response.json()
-
-    # Slice creation
-    slc = dataset.create_slice(
-        name=TEST_SLICE_NAME,
-        reference_ids=[item.reference_id for item in ds_items[:2]],
-    )
-
-    dataset_slices = dataset.slices
-    assert len(dataset_slices) == 1
-    assert slc.slice_id == dataset_slices[0]
-
-    response = slc.info()
-    assert response["name"] == TEST_SLICE_NAME
-    assert response["dataset_id"] == dataset.id
-    assert len(response["dataset_items"]) == 2
-    for item in ds_items[:2]:
-        assert (
-            item.reference_id == response["dataset_items"][0]["ref_id"]
-            or item.reference_id == response["dataset_items"][1]["ref_id"]
-        )
-
-
-def test_slice_append(dataset):
-    # Dataset upload
-    ds_items = []
-    for url in TEST_IMG_URLS:
-        ds_items.append(
-            DatasetItem(
-                image_location=url,
-                reference_id=reference_id_from_url(url),
-            )
-        )
-    response = dataset.append(ds_items)
-    assert ERROR_PAYLOAD not in response.json()
-
-    # Slice creation
-    slc = dataset.create_slice(
-        name=TEST_SLICE_NAME,
-        reference_ids=[ds_items[0].reference_id],
-    )
-
-    # Insert duplicate first item
-    slc.append(reference_ids=[item.reference_id for item in ds_items[:3]])
-
-    response = slc.info()
-    assert len(response["dataset_items"]) == 3
-    for item in ds_items[:3]:
-        assert (
-            item.reference_id == response["dataset_items"][0]["ref_id"]
-            or item.reference_id == response["dataset_items"][1]["ref_id"]
-            or item.reference_id == response["dataset_items"][2]["ref_id"]
-        )
-
-    all_stored_items = slc.items()
-
-    def sort_by_reference_id(items):
-        # Remove the generated item_ids and standardize
-        # empty metadata so we can do an equality check.
-        for item in items:
-            item.item_id = None
-            if item.metadata == {}:
-                item.metadata = None
-        return sorted(items, key=lambda x: x.reference_id)
-
-    breakpoint()
-    assert sort_by_reference_id(all_stored_items) == sort_by_reference_id(
-        ds_items[:3]
-    )

tests/test_slice.py

Lines changed: 133 additions & 1 deletion
@@ -1,5 +1,22 @@
 import pytest
-from nucleus import Slice, NucleusClient
+from nucleus import Slice, NucleusClient, DatasetItem, BoxAnnotation
+from nucleus.constants import ERROR_PAYLOAD
+from helpers import (
+    TEST_DATASET_NAME,
+    TEST_IMG_URLS,
+    TEST_SLICE_NAME,
+    TEST_BOX_ANNOTATIONS,
+    reference_id_from_url,
+)
+
+
+@pytest.fixture()
+def dataset(CLIENT):
+    ds = CLIENT.create_dataset(TEST_DATASET_NAME)
+    yield ds
+
+    response = CLIENT.delete_dataset(ds.id)
+    assert response == {}
 
 
 def test_reprs():
@@ -9,3 +26,118 @@ def test_repr(test_object: any):
 
     client = NucleusClient(api_key="fake_key")
     test_repr(Slice(slice_id="fake_slice_id", client=client))
+
+
+def test_slice_create_and_delete_and_list(dataset):
+    # Dataset upload
+    ds_items = []
+    for url in TEST_IMG_URLS:
+        ds_items.append(
+            DatasetItem(
+                image_location=url,
+                reference_id=reference_id_from_url(url),
+            )
+        )
+    response = dataset.append(ds_items)
+    assert ERROR_PAYLOAD not in response.json()
+
+    # Slice creation
+    slc = dataset.create_slice(
+        name=TEST_SLICE_NAME,
+        reference_ids=[item.reference_id for item in ds_items[:2]],
+    )
+
+    dataset_slices = dataset.slices
+    assert len(dataset_slices) == 1
+    assert slc.slice_id == dataset_slices[0]
+
+    response = slc.info()
+    assert response["name"] == TEST_SLICE_NAME
+    assert response["dataset_id"] == dataset.id
+    assert len(response["dataset_items"]) == 2
+    for item in ds_items[:2]:
+        assert (
+            item.reference_id == response["dataset_items"][0]["ref_id"]
+            or item.reference_id == response["dataset_items"][1]["ref_id"]
+        )
+
+
+def test_slice_create_and_annotate(dataset):
+    # Dataset upload
+    url = TEST_IMG_URLS[0]
+    annotation_in_slice = BoxAnnotation(**TEST_BOX_ANNOTATIONS[0])
+    annotation_not_in_slice = BoxAnnotation(**TEST_BOX_ANNOTATIONS[1])
+
+    ds_items = []
+    ds_items.append(
+        DatasetItem(
+            image_location=url,
+            reference_id=reference_id_from_url(url),
+        )
+    )
+    response = dataset.append(ds_items)
+    assert ERROR_PAYLOAD not in response.json()
+
+    # Slice creation
+    slc = dataset.create_slice(
+        name=TEST_SLICE_NAME,
+        reference_ids=[item.reference_id for item in ds_items[:2]],
+    )
+
+    slc.annotate(annotations=[annotation_in_slice])
+    with pytest.raises(ValueError) as not_in_slice_error:
+        slc.annotate(annotations=[annotation_not_in_slice])
+
+    assert (
+        annotation_not_in_slice.reference_id
+        in not_in_slice_error.value.args[0]
+    )
+
+    slc.annotate(annotations=[annotation_not_in_slice], strict=False)
+
+
+def test_slice_append(dataset):
+    # Dataset upload
+    ds_items = []
+    for url in TEST_IMG_URLS:
+        ds_items.append(
+            DatasetItem(
+                image_location=url,
+                reference_id=reference_id_from_url(url),
+            )
+        )
+    response = dataset.append(ds_items)
+    assert ERROR_PAYLOAD not in response.json()
+
+    # Slice creation
+    slc = dataset.create_slice(
+        name=TEST_SLICE_NAME,
+        reference_ids=[ds_items[0].reference_id],
+    )
+
+    # Insert duplicate first item
+    slc.append(reference_ids=[item.reference_id for item in ds_items[:3]])
+
+    response = slc.info()
+    assert len(response["dataset_items"]) == 3
+    for item in ds_items[:3]:
+        assert (
+            item.reference_id == response["dataset_items"][0]["ref_id"]
+            or item.reference_id == response["dataset_items"][1]["ref_id"]
+            or item.reference_id == response["dataset_items"][2]["ref_id"]
+        )
+
+    all_stored_items = slc.items()
+
+    def sort_by_reference_id(items):
+        # Remove the generated item_ids and standardize
+        # empty metadata so we can do an equality check.
+        for item in items:
+            item.item_id = None
+            if item.metadata == {}:
+                item.metadata = None
+        return sorted(items, key=lambda x: x.reference_id)
+
+    assert sort_by_reference_id(all_stored_items) == sort_by_reference_id(
+        ds_items[:3]
+    )
