     TEST_MULTICATEGORY_ANNOTATIONS,
     TEST_POLYGON_ANNOTATIONS,
     TEST_SEGMENTATION_ANNOTATIONS,
+    assert_partial_equality,
     reference_id_from_url,
 )
@@ -294,26 +295,14 @@ def test_dataset_append_async(dataset: Dataset):
     job = dataset.append(make_dataset_items(), asynchronous=True)
     job.sleep_until_complete()
     status = job.status()
-    status["message"]["PayloadUrl"] = ""
-    print(status)
-    assert status == {
+    expected = {
         "job_id": job.job_id,
         "status": "Completed",
-        "message": {
-            "PayloadUrl": "",
-            "image_upload_step": {"errored": 0, "pending": 0, "completed": 5},
-            "started_image_processing": f"Dataset: {dataset.id}, Job: {job.job_id}",
-            "ingest_to_reupload_queue": {
-                "epoch": 1,
-                "total": 5,
-                "datasetId": f"{dataset.id}",
-                "processed": 5,
-            },
-        },
         "job_progress": "1.00",
         "completed_steps": 5,
         "total_steps": 5,
     }
+    assert_partial_equality(expected, status)


 def test_dataset_append_async_with_local_path(dataset: Dataset):
@@ -341,22 +330,8 @@ def test_dataset_append_async_with_1_bad_url(dataset: Dataset):
     assert status["job_progress"] == "0.80"
     assert status["completed_steps"] == 4
     assert status["total_steps"] == 5
-    assert status["message"] == {
-        "PayloadUrl": "",
-        "image_upload_step": {"errored": 1, "pending": 0, "completed": 4},
-        "ingest_to_reupload_queue": {
-            "epoch": 1,
-            "total": 5,
-            "datasetId": f"{dataset.id}",
-            "processed": 5,
-        },
-        "started_image_processing": f"Dataset: {dataset.id}, Job: {job.job_id}",
-    }
     # The error is fairly detailed and subject to change. What's important is we surface which URLs failed.
-    assert (
-        'Failure when processing the image "https://looks.ok.but.is.not.accessible"'
-        in str(job.errors())
-    )
+    assert '"https://looks.ok.but.is.not.accessible"' in str(job.errors())


 def test_dataset_list_autotags(CLIENT, dataset):
@@ -398,7 +373,8 @@ def test_annotate_async(dataset: Dataset):
         asynchronous=True,
     )
     job.sleep_until_complete()
-    assert job.status() == {
+    status = job.status()
+    expected = {
         "job_id": job.job_id,
         "status": "Completed",
         "message": {
@@ -420,6 +396,7 @@ def test_annotate_async(dataset: Dataset):
         "completed_steps": 5,
         "total_steps": 5,
     }
+    assert_partial_equality(expected, status)


 @pytest.mark.integration
@@ -439,8 +416,8 @@ def test_annotate_async_with_error(dataset: Dataset):
         asynchronous=True,
     )
     job.sleep_until_complete()
-
-    assert job.status() == {
+    status = job.status()
+    expected = {
         "job_id": job.job_id,
         "status": "Completed",
         "message": {
@@ -462,6 +439,7 @@ def test_annotate_async_with_error(dataset: Dataset):
         "completed_steps": 5,
         "total_steps": 5,
     }
+    assert_partial_equality(expected, status)

     assert "Item with id fake_garbage doesn" in str(job.errors())

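Note: the diff imports `assert_partial_equality` from the shared test helpers but does not show its body. A minimal sketch of what such a subset-comparison helper could look like is below; the function name matches the import, but the signature details and implementation are assumptions, not the repository's actual code.

```python
def assert_partial_equality(expected: dict, actual: dict) -> None:
    """Assert that every key/value pair in `expected` also appears in `actual`.

    Hypothetical sketch only; the real helper may differ. Extra keys in
    `actual` (e.g. the volatile "message" payload) are ignored, so the
    integration tests no longer break when the backend adds or reorders fields.
    """
    for key, value in expected.items():
        assert key in actual, f"missing key: {key}"
        assert actual[key] == value, f"{key}: expected {value!r}, got {actual[key]!r}"
```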