Skip to content

Commit 983c91a

Browse files
Author: Ubuntu (committed)
Commit message: No more dataset item ids and tests pass locally
1 parent 3619a54 commit 983c91a

File tree

5 files changed: +24 additions, −14 deletions

nucleus/dataset.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -419,6 +419,7 @@ def items_and_annotations(
419419
route=f"dataset/{self.id}/exportForTraining",
420420
requests_command=requests.get,
421421
)
422+
print(api_payload)
422423
return convert_export_payload(api_payload[EXPORTED_ROWS])
423424

424425
def export_embeddings(

nucleus/slice.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,6 @@ def info(self) -> dict:
5454

5555
def append(
5656
self,
57-
dataset_item_ids: List[str] = None,
5857
reference_ids: List[str] = None,
5958
) -> dict:
6059
"""
@@ -73,7 +72,6 @@ def append(
7372
"""
7473
response = self._client.append_to_slice(
7574
slice_id=self.slice_id,
76-
dataset_item_ids=dataset_item_ids,
7775
reference_ids=reference_ids,
7876
)
7977
return response

scripts/load_test.py

Lines changed: 23 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212

1313
FLAGS = flags.FLAGS
1414

15+
IMAGE_URL = "https://github.com/scaleapi/nucleus-python-client/raw/master/tests/testdata/airplane.jpeg"
16+
1517
# Global flags
1618
flags.DEFINE_string(
1719
"api_key",
@@ -22,7 +24,7 @@
2224
# Dataset upload flags
2325
flags.DEFINE_enum(
2426
"create_or_reuse_dataset",
25-
"reuse",
27+
"create",
2628
["create", "reuse"],
2729
"If create, upload a new dataset. If reuse, use the dataset id flag to reuse an existing dataset.",
2830
)
@@ -38,7 +40,7 @@
3840
lower_bound=0,
3941
)
4042
flags.DEFINE_bool(
41-
"cleanup_dataset", False, "Whether to delete the dataset after the test."
43+
"cleanup_dataset", True, "Whether to delete the dataset after the test."
4244
)
4345

4446
# Annotation upload flags
@@ -52,9 +54,9 @@
5254
# Prediction upload flags
5355
flags.DEFINE_integer(
5456
"num_predictions_per_dataset_item",
55-
1,
57+
0,
5658
"Number of annotations per dataset item",
57-
lower_bound=1,
59+
lower_bound=0,
5860
)
5961

6062

@@ -73,7 +75,7 @@ def generate_fake_metadata(index):
7375
def dataset_item_generator():
7476
for i in range(FLAGS.num_dataset_items):
7577
yield nucleus.DatasetItem(
76-
image_location=f"https://fake_url_that_wont_be_read_since_its_privacy_mode/{i}",
78+
image_location=IMAGE_URL,
7779
reference_id=str(i),
7880
metadata=generate_fake_metadata(i),
7981
upload_to_scale=False,
@@ -143,7 +145,7 @@ def upload_annotations(dataset: Dataset):
143145
print("Starting annotation upload")
144146
tic = time.time()
145147
job = dataset.annotate(
146-
list(annotation_generator()), update=True, asynchronous=True
148+
list(annotation_generator()), update=False, asynchronous=True
147149
)
148150
try:
149151
job.sleep_until_complete(False)
@@ -154,17 +156,27 @@ def upload_annotations(dataset: Dataset):
154156

155157

156158
def upload_predictions(dataset: Dataset):
157-
print("Starting Prediction upload")
159+
model = client().add_model(
160+
name="Load test model", reference_id="model_" + str(time.time())
161+
)
162+
run = model.create_run(
163+
name="Test model run", dataset=dataset, predictions=[]
164+
)
165+
158166
tic = time.time()
159-
job = dataset.predict(
167+
168+
print("Starting prediction upload")
169+
170+
job = run.predict(
160171
list(prediction_generator()), update=True, asynchronous=True
161172
)
173+
162174
try:
163175
job.sleep_until_complete(False)
164176
except JobError:
165177
print(job.errors())
166178
toc = time.time()
167-
print("Finished prediciton upload: %s" % (toc - tic))
179+
print("Finished prediction upload: %s" % (toc - tic))
168180

169181

170182
def main(unused_argv):
@@ -173,10 +185,12 @@ def main(unused_argv):
173185
upload_annotations(dataset)
174186
except Exception as e:
175187
print(e)
188+
176189
try:
177190
upload_predictions(dataset)
178191
except Exception as e:
179192
print(e)
193+
180194
if FLAGS.cleanup_dataset and FLAGS.create_or_reuse_dataset == "create":
181195
client().delete_dataset(dataset.id)
182196

tests/test_dataset.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -471,7 +471,6 @@ def test_append_and_export(dataset):
471471
# We don't export everything on the annotations in order to speed up export.
472472
def clear_fields(annotation):
473473
cleared_annotation = copy.deepcopy(annotation)
474-
cleared_annotation.annotation_id = None
475474
cleared_annotation.metadata = {}
476475
return cleared_annotation
477476

tests/test_slice.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -102,8 +102,6 @@ def test_slice_create_and_export(dataset):
102102
dataset.annotate(annotations=[annotation_in_slice])
103103

104104
expected_box_annotation = copy.deepcopy(annotation_in_slice)
105-
expected_box_annotation.annotation_id = None
106-
expected_box_annotation.metadata = {}
107105

108106
exported = slc.items_and_annotations()
109107
assert exported[0][ITEM_KEY] == ds_items[0]

Commit comments: 0