From bbd2daec037192e9702426bf2939af6bc2262cde Mon Sep 17 00:00:00 2001
From: Matt McCormick
Date: Fri, 29 Nov 2024 14:38:56 -0500
Subject: [PATCH] ENH: Add a test for large image serialization with tensorstore

---
 test/test_to_ngff_zarr_tensorstore.py | 31 ++++++++++++++++++++++++++-
 1 file changed, 30 insertions(+), 1 deletion(-)

diff --git a/test/test_to_ngff_zarr_tensorstore.py b/test/test_to_ngff_zarr_tensorstore.py
index 90cc63aa..c24b6a51 100644
--- a/test/test_to_ngff_zarr_tensorstore.py
+++ b/test/test_to_ngff_zarr_tensorstore.py
@@ -3,8 +3,16 @@
 import pytest
 import zarr
+from dask_image import imread
 
-from ngff_zarr import Methods, to_multiscales, to_ngff_zarr, from_ngff_zarr
+from ngff_zarr import (
+    Methods,
+    to_multiscales,
+    to_ngff_zarr,
+    from_ngff_zarr,
+    config,
+    to_ngff_image,
+)
 
 from ._data import verify_against_baseline
 
 
@@ -47,3 +55,24 @@ def test_gaussian_isotropic_scale_factors(input_images):
         to_ngff_zarr(tmpdir, multiscales, use_tensorstore=True)
         multiscales = from_ngff_zarr(tmpdir)
         verify_against_baseline(dataset_name, baseline_name, multiscales)
+
+
+def test_large_image_serialization(input_images):
+    pytest.importorskip("tensorstore")
+
+    default_mem_target = config.memory_target
+    config.memory_target = int(1e6)
+
+    dataset_name = "lung_series"
+    data = imread.imread(input_images[dataset_name])
+    image = to_ngff_image(
+        data=data,
+        dims=("z", "y", "x"),
+        scale={"z": 2.5, "y": 1.40625, "x": 1.40625},
+        translation={"z": 332.5, "y": 360.0, "x": 0.0},
+        name="LIDC2",
+    )
+    multiscales = to_multiscales(image)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        to_ngff_zarr(tmpdir, multiscales, use_tensorstore=True)
+    config.memory_target = default_mem_target
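
Note (illustrative, not part of the patch to apply): as written, the new test restores
config.memory_target only when to_ngff_zarr succeeds, so a failure would leave the
lowered 1 MB target in place for later tests. A minimal sketch of an exception-safe
variant, assuming the same imports and module-level names as the patched test file,
could look like:

    def test_large_image_serialization(input_images):
        pytest.importorskip("tensorstore")

        default_mem_target = config.memory_target
        # Force the chunked, low-memory serialization code path.
        config.memory_target = int(1e6)
        try:
            dataset_name = "lung_series"
            data = imread.imread(input_images[dataset_name])
            image = to_ngff_image(
                data=data,
                dims=("z", "y", "x"),
                scale={"z": 2.5, "y": 1.40625, "x": 1.40625},
                translation={"z": 332.5, "y": 360.0, "x": 0.0},
                name="LIDC2",
            )
            multiscales = to_multiscales(image)
            with tempfile.TemporaryDirectory() as tmpdir:
                to_ngff_zarr(tmpdir, multiscales, use_tensorstore=True)
        finally:
            # Always restore the global memory target, even if serialization fails.
            config.memory_target = default_mem_target

The new test can be run on its own with, for example:

    pytest test/test_to_ngff_zarr_tensorstore.py -k test_large_image_serialization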