
Commit 3a813bb

ENH: Add large image sharding test
1 parent 853fdd2 commit 3a813bb

File tree: 1 file changed (+35, −0)

test/test_large_serialization.py

Lines changed: 35 additions & 0 deletions
@@ -1,6 +1,12 @@
+from packaging import version
+
 from dask_image import imread
 from ngff_zarr import config, to_multiscales, to_ngff_image, to_ngff_zarr
 from zarr.storage import MemoryStore
+import zarr
+import pytest
+
+zarr_version = version.parse(zarr.__version__)
 
 
 def test_large_image_serialization(input_images):
@@ -24,3 +30,32 @@ def test_large_image_serialization(input_images):
     # verify_against_baseline(dataset_name, baseline_name, multiscales)
 
     config.memory_target = default_mem_target
+
+
+def test_large_image_serialization_with_sharding(input_images):
+    pytest.mark.skipif(
+        zarr_version < version.parse("3.0.0b1"), reason="zarr version < 3.0.0b1"
+    )
+    default_mem_target = config.memory_target
+    config.memory_target = int(1e6)
+
+    dataset_name = "lung_series"
+    data = imread.imread(input_images[dataset_name])
+    image = to_ngff_image(
+        data=data,
+        dims=("z", "y", "x"),
+        scale={"z": 2.5, "y": 1.40625, "x": 1.40625},
+        translation={"z": 332.5, "y": 360.0, "x": 0.0},
+        name="LIDC2",
+    )
+    multiscales = to_multiscales(image)
+    # baseline_name = "auto/memory_target_1e6.zarr"
+    # store_new_multiscales(dataset_name, baseline_name, multiscales)
+    test_store = MemoryStore()
+    chunks_per_shard = 1
+    to_ngff_zarr(
+        test_store, multiscales, version="0.5", chunks_per_shard=chunks_per_shard
+    )
+    # verify_against_baseline(dataset_name, baseline_name, multiscales)
+
+    config.memory_target = default_mem_target
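
Note on the version gate: pytest.mark.skipif(...) called inside the test body only constructs a marker object and does not apply it, so this test would still run against zarr releases older than 3.0.0b1. The gate itself makes sense, since sharded writes with chunks_per_shard depend on the Zarr v3 sharding support available in zarr-python 3.x. Below is a minimal sketch, not part of the commit, of two ways the skip could actually take effect, reusing the module-level zarr_version from the diff; the function bodies are elided and the alternative test name is illustrative.

import pytest
import zarr
from packaging import version

zarr_version = version.parse(zarr.__version__)


# Option 1: apply the marker as a decorator so the test is skipped at collection time.
@pytest.mark.skipif(
    zarr_version < version.parse("3.0.0b1"), reason="zarr version < 3.0.0b1"
)
def test_large_image_serialization_with_sharding(input_images):
    ...


# Option 2: skip imperatively at runtime from inside the test body.
def test_large_image_serialization_with_sharding_runtime(input_images):
    if zarr_version < version.parse("3.0.0b1"):
        pytest.skip("zarr version < 3.0.0b1")
    ...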
