Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ jobs:
python -m pip install --upgrade pip
python -m pip install -e ".[tensorstore]"

- name: Install Zarr Python 3 for OME-Zarr 0.5
if:
${{ matrix.python-version != '3.9' && matrix.python-version != '3.10'
}}
run: |
python -m pip install --upgrade --pre "zarr==3.0.0b2"

- name: Test with pytest
run: |
pytest --junitxml=junit/test-results.xml
Expand Down
2 changes: 1 addition & 1 deletion ngff_zarr/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from .to_ngff_image import to_ngff_image
from .to_ngff_zarr import to_ngff_zarr
from .validate import validate
from .zarr_metadata import (
from .v04.zarr_metadata import (
AxesType,
SpatialDims,
SupportedDims,
Expand Down
8 changes: 8 additions & 0 deletions ngff_zarr/_zarr_kwargs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import zarr
from packaging import version

# Parsed version of the installed Zarr Python package.
zarr_version = version.parse(zarr.__version__)

# Zarr Python 3 dropped the ``dimension_separator`` keyword (v3 stores use
# "/"-separated chunk keys by default), so it is only passed on Zarr 2.
if zarr_version < version.parse("3.0.0b1"):
    zarr_kwargs = {"dimension_separator": "/"}
else:
    zarr_kwargs = {}
9 changes: 9 additions & 0 deletions ngff_zarr/_zarr_open_array.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import zarr
from packaging import version

# Parsed version of the installed Zarr Python package.
zarr_version = version.parse(zarr.__version__)

# ``open_array`` moved between Zarr Python 2 and 3; import it from the
# location matching the installed major version.
if zarr_version < version.parse("3.0.0b1"):
    from zarr.creation import open_array
else:
    from zarr.api.synchronous import open_array

# Re-bind so the name is an explicit module attribute (quiet re-export).
open_array = open_array
9 changes: 4 additions & 5 deletions ngff_zarr/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@
from .to_multiscales import to_multiscales
from .to_ngff_image import to_ngff_image
from .to_ngff_zarr import to_ngff_zarr
from .zarr_metadata import is_unit_supported
from .v04.zarr_metadata import is_unit_supported
from ._zarr_kwargs import zarr_kwargs


def _multiscales_to_ngff_zarr(
Expand Down Expand Up @@ -235,9 +236,7 @@ def main():
cache_dir = Path(args.cache_dir).resolve()
if not cache_dir.exists():
Path.makedirs(cache_dir, parents=True)
config.cache_store = zarr.storage.DirectoryStore(
cache_dir, dimension_separator="/"
)
config.cache_store = zarr.storage.DirectoryStore(cache_dir, **zarr_kwargs)

console = Console()
progress = RichProgress(
Expand Down Expand Up @@ -304,7 +303,7 @@ def shutdown_client(sig_id, frame): # noqa: ARG001
)
output_store = None
if args.output and output_backend is ConversionBackend.NGFF_ZARR:
output_store = DirectoryStore(args.output, dimension_separator="/")
output_store = DirectoryStore(args.output, **zarr_kwargs)

subtitle = "[red]generation"
if not args.output:
Expand Down
11 changes: 9 additions & 2 deletions ngff_zarr/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
from pathlib import Path

import dask.config
import zarr
from platformdirs import user_cache_dir
from zarr.storage import StoreLike
from ._zarr_kwargs import zarr_kwargs

if dask.config.get("temporary-directory") is not None:
_store_dir = dask.config.get("temporary-directory")
Expand All @@ -13,7 +13,14 @@


def default_store_factory():
    """Build the default on-disk Zarr store rooted at ``_store_dir``.

    Works with both Zarr Python major versions: Zarr 2 provides
    ``DirectoryStore`` (constructed with ``zarr_kwargs``), while Zarr 3
    replaced it with ``LocalStore``.
    """
    try:
        from zarr.storage import DirectoryStore

        store = DirectoryStore(_store_dir, **zarr_kwargs)
    except ImportError:
        # Zarr Python 3: DirectoryStore no longer exists.
        from zarr.storage import LocalStore

        store = LocalStore(_store_dir)

    return store


try:
Expand Down
71 changes: 56 additions & 15 deletions ngff_zarr/from_ngff_zarr.py
Original file line number Diff line number Diff line change
@@ -1,41 +1,70 @@
from collections.abc import MutableMapping
from pathlib import Path
from typing import Union
from typing import Union, Optional
from packaging import version

import dask.array
import zarr
from zarr.storage import BaseStore
import zarr.storage

# Zarr Python 3
if hasattr(zarr.storage, "StoreLike"):
StoreLike = zarr.storage.StoreLike
else:
StoreLike = Union[MutableMapping, str, Path, zarr.storage.BaseStore]

from .ngff_image import NgffImage
from .to_multiscales import Multiscales
from .zarr_metadata import Axis, Dataset, Metadata, Scale, Translation
from .v04.zarr_metadata import Axis, Dataset, Scale, Translation
from .validate import validate as validate_ngff

zarr_version = version.parse(zarr.__version__)
zarr_version_major = zarr_version.major


def from_ngff_zarr(
store: Union[MutableMapping, str, Path, BaseStore],
store: StoreLike,
validate: bool = False,
version: Optional[str] = None,
) -> Multiscales:
"""
Read an OME-Zarr NGFF Multiscales data structure from a Zarr store.

store : MutableMapping, str or Path, zarr.storage.BaseStore
store : StoreLike
Store or path to directory in file system.

validate : bool
If True, validate the NGFF metadata against the schema.

version : string, optional
OME-Zarr version, if known.

Returns
-------

multiscales: multiscale ngff image with dask-chunked arrays for data

"""

root = zarr.open_group(store, mode="r")
format_kwargs = {}
if version and zarr_version_major >= 3:
format_kwargs = {"zarr_format": 2} if version == "0.4" else {"zarr_format": 3}
root = zarr.open_group(store, mode="r", **format_kwargs)
root_attrs = root.attrs.asdict()

if not version:
if "ome" in root_attrs:
version = root_attrs["ome"]["version"]
else:
version = root_attrs["multiscales"][0].get("version", "0.4")

if validate:
validate_ngff(root.attrs.asdict())
metadata = root.attrs["multiscales"][0]
validate_ngff(root_attrs, version=version)

if "ome" in root_attrs:
metadata = root.attrs["ome"]["multiscales"][0]
else:
metadata = root.attrs["multiscales"][0]

dims = [a["name"] for a in metadata["axes"]]

Expand Down Expand Up @@ -82,12 +111,24 @@ def from_ngff_zarr(
coordinateTransformations = None
if "coordinateTransformations" in metadata:
coordinateTransformations = metadata["coordinateTransformations"]
metadata = Metadata(
axes=axes,
datasets=datasets,
name=name,
version=metadata["version"],
coordinateTransformations=coordinateTransformations,
)
if version == "0.5":
from .v05.zarr_metadata import Metadata

metadata = Metadata(
axes=axes,
datasets=datasets,
name=name,
coordinateTransformations=coordinateTransformations,
)
else:
from .v04.zarr_metadata import Metadata

metadata = Metadata(
axes=axes,
datasets=datasets,
name=name,
version=metadata["version"],
coordinateTransformations=coordinateTransformations,
)

return Multiscales(images, metadata)
2 changes: 1 addition & 1 deletion ngff_zarr/multiscales.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from .methods import Methods
from .ngff_image import NgffImage
from .zarr_metadata import Metadata
from .v04.zarr_metadata import Metadata


@dataclass
Expand Down
2 changes: 1 addition & 1 deletion ngff_zarr/ngff_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from dask.array.core import Array as DaskArray

from .zarr_metadata import Units
from .v04.zarr_metadata import Units

ComputedCallback = Callable[[], None]

Expand Down
35 changes: 25 additions & 10 deletions ngff_zarr/to_multiscales.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,14 @@
import zarr
from dask.array.core import Array as DaskArray
from numpy.typing import ArrayLike
from zarr.core import Array as ZarrArray

try:
from zarr.core import Array as ZarrArray
except ImportError:
from zarr.core.array import Array as ZarrArray
from ._zarr_kwargs import zarr_kwargs
from ._zarr_open_array import open_array
import zarr.storage

from .config import config
from .memory_usage import memory_usage
Expand All @@ -30,7 +37,7 @@
from .ngff_image import NgffImage
from .rich_dask_progress import NgffProgress, NgffProgressCallback
from .to_ngff_image import to_ngff_image
from .zarr_metadata import Axis, Dataset, Metadata, Scale, Translation
from .v04.zarr_metadata import Axis, Dataset, Metadata, Scale, Translation


def _ngff_image_scale_factors(ngff_image, min_length, out_chunks):
Expand Down Expand Up @@ -82,10 +89,18 @@ def _large_image_serialization(
def remove_from_cache_store(sig_id, frame): # noqa: ARG001
nonlocal base_path_removed
if not base_path_removed:
if isinstance(cache_store, zarr.storage.DirectoryStore):
if hasattr(zarr.storage, "DirectoryStore") and isinstance(
cache_store, zarr.storage.DirectoryStore
):
full_path = Path(cache_store.dir_path()) / base_path
if full_path.exists():
shutil.rmtree(full_path, ignore_errors=True)
elif hasattr(zarr.storage, "LocalStore") and isinstance(
cache_store, zarr.storage.LocalStore
):
full_path = Path(cache_store.root) / base_path
if full_path.exists():
shutil.rmtree(full_path, ignore_errors=True)
else:
zarr.storage.rmdir(cache_store, base_path)
base_path_removed = True
Expand Down Expand Up @@ -129,14 +144,14 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
slabs.chunks,
meta=slabs,
)
zarr_array = zarr.creation.open_array(
zarr_array = open_array(
shape=data.shape,
chunks=chunks,
dtype=data.dtype,
store=cache_store,
path=path,
mode="a",
dimension_separator="/",
**zarr_kwargs,
)

n_slabs = int(np.ceil(data.shape[z_index] / slab_slices))
Expand Down Expand Up @@ -164,7 +179,7 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
overwrite=False,
compute=True,
return_stored=False,
dimension_separator="/",
**zarr_kwargs,
)
data = dask.array.from_zarr(cache_store, component=path)
if optimized_chunks < data.shape[z_index] and slab_slices < optimized_chunks:
Expand All @@ -173,14 +188,14 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
path = f"{base_path}/optimized_chunks"
chunks = tuple([c[0] for c in optimized.chunks])
data = data.rechunk(chunks)
zarr_array = zarr.creation.open_array(
zarr_array = open_array(
shape=data.shape,
chunks=chunks,
dtype=data.dtype,
store=cache_store,
path=path,
mode="a",
dimension_separator="/",
**zarr_kwargs,
)
n_slabs = int(np.ceil(data.shape[z_index] / optimized_chunks))
for slab_index in range(n_slabs):
Expand All @@ -205,7 +220,7 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
overwrite=False,
compute=True,
return_stored=False,
dimension_separator="/",
**zarr_kwargs,
)
data = dask.array.from_zarr(cache_store, component=path)
else:
Expand All @@ -223,7 +238,7 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
overwrite=False,
compute=True,
return_stored=False,
dimension_separator="/",
**zarr_kwargs,
)
data = dask.array.from_zarr(cache_store, component=path)

Expand Down
8 changes: 6 additions & 2 deletions ngff_zarr/to_ngff_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,15 @@
import dask
from dask.array.core import Array as DaskArray
from numpy.typing import ArrayLike
from zarr.core import Array as ZarrArray

try:
from zarr.core import Array as ZarrArray
except ImportError:
from zarr.core.array import Array as ZarrArray

from .methods._support import _spatial_dims
from .ngff_image import NgffImage
from .zarr_metadata import SupportedDims, Units
from .v04.zarr_metadata import SupportedDims, Units


def to_ngff_image(
Expand Down
Loading
Loading