 import zarr
 from dask.array.core import Array as DaskArray
 from numpy.typing import ArrayLike
-from zarr.core import Array as ZarrArray
+
+try:
+    from zarr.core import Array as ZarrArray
+except ImportError:
+    from zarr.core.array import Array as ZarrArray
+from ._zarr_kwargs import zarr_kwargs
+from ._zarr_open_array import open_array
+import zarr.storage

 from .config import config
 from .memory_usage import memory_usage
@@ -30,7 +37,7 @@
 from .ngff_image import NgffImage
 from .rich_dask_progress import NgffProgress, NgffProgressCallback
 from .to_ngff_image import to_ngff_image
-from .zarr_metadata import Axis, Dataset, Metadata, Scale, Translation
+from .v04.zarr_metadata import Axis, Dataset, Metadata, Scale, Translation


 def _ngff_image_scale_factors(ngff_image, min_length, out_chunks):
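The new `._zarr_kwargs` helper is not part of this diff. Judging from how `**zarr_kwargs` replaces `dimension_separator="/"` in the hunks below, a minimal sketch of what it might contain follows; the module name comes from the import above, but the packaging-based version check and the exact contents are assumptions, not the PR's actual code.

# _zarr_kwargs.py -- hypothetical sketch, assuming only the dimension_separator
# keyword differs between zarr-python major versions.
import zarr
from packaging import version

if version.parse(zarr.__version__).major >= 3:
    # zarr-python 3 creates v3-format arrays whose chunk keys already use "/"
    # by default, and the dimension_separator keyword is gone, so pass nothing.
    zarr_kwargs = {}
else:
    # zarr-python 2 needs an explicit "/" separator for nested chunk keys.
    zarr_kwargs = {"dimension_separator": "/"}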
@@ -82,10 +89,18 @@ def _large_image_serialization(
     def remove_from_cache_store(sig_id, frame):  # noqa: ARG001
         nonlocal base_path_removed
         if not base_path_removed:
-            if isinstance(cache_store, zarr.storage.DirectoryStore):
+            if hasattr(zarr.storage, "DirectoryStore") and isinstance(
+                cache_store, zarr.storage.DirectoryStore
+            ):
                 full_path = Path(cache_store.dir_path()) / base_path
                 if full_path.exists():
                     shutil.rmtree(full_path, ignore_errors=True)
+            elif hasattr(zarr.storage, "LocalStore") and isinstance(
+                cache_store, zarr.storage.LocalStore
+            ):
+                full_path = Path(cache_store.root) / base_path
+                if full_path.exists():
+                    shutil.rmtree(full_path, ignore_errors=True)
             else:
                 zarr.storage.rmdir(cache_store, base_path)
             base_path_removed = True
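The cleanup hunk above picks whichever local store class the installed zarr provides: `DirectoryStore` with its `dir_path()` method on zarr-python 2, `LocalStore` with its `root` attribute on zarr-python 3, and it falls back to `zarr.storage.rmdir` otherwise. Factored out of the signal handler, the same dispatch could read as below; the helper name is illustrative only, not part of the PR.

import zarr.storage
from pathlib import Path

def _cache_store_local_path(cache_store):
    """Return the on-disk path of a local cache store, or None for other store types."""
    directory_store = getattr(zarr.storage, "DirectoryStore", None)  # zarr-python 2.x
    local_store = getattr(zarr.storage, "LocalStore", None)  # zarr-python 3.x
    if directory_store is not None and isinstance(cache_store, directory_store):
        return Path(cache_store.dir_path())
    if local_store is not None and isinstance(cache_store, local_store):
        return Path(cache_store.root)
    return None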
@@ -129,14 +144,14 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
             slabs.chunks,
             meta=slabs,
         )
-        zarr_array = zarr.creation.open_array(
+        zarr_array = open_array(
             shape=data.shape,
             chunks=chunks,
             dtype=data.dtype,
             store=cache_store,
             path=path,
             mode="a",
-            dimension_separator="/",
+            **zarr_kwargs,
         )

         n_slabs = int(np.ceil(data.shape[z_index] / slab_slices))
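`open_array` in the hunk above comes from the new `._zarr_open_array` module, which this diff also does not show. Assuming the wrapper only has to resolve where `open_array` lives in each zarr-python major version, a sketch might be as simple as the following; the real module may also normalize keyword arguments.

# _zarr_open_array.py -- minimal sketch under the assumption above.
import zarr
from packaging import version

if version.parse(zarr.__version__).major >= 3:
    # zarr-python 3 exposes open_array at the package top level.
    from zarr import open_array
else:
    # zarr-python 2 provides it in zarr.creation, as the removed code used.
    from zarr.creation import open_array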
@@ -164,7 +179,7 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
                 overwrite=False,
                 compute=True,
                 return_stored=False,
-                dimension_separator="/",
+                **zarr_kwargs,
             )
         data = dask.array.from_zarr(cache_store, component=path)
         if optimized_chunks < data.shape[z_index] and slab_slices < optimized_chunks:
@@ -173,14 +188,14 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
             path = f"{base_path}/optimized_chunks"
             chunks = tuple([c[0] for c in optimized.chunks])
             data = data.rechunk(chunks)
-            zarr_array = zarr.creation.open_array(
+            zarr_array = open_array(
                 shape=data.shape,
                 chunks=chunks,
                 dtype=data.dtype,
                 store=cache_store,
                 path=path,
                 mode="a",
-                dimension_separator="/",
+                **zarr_kwargs,
             )
             n_slabs = int(np.ceil(data.shape[z_index] / optimized_chunks))
             for slab_index in range(n_slabs):
@@ -205,7 +220,7 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
                     overwrite=False,
                     compute=True,
                     return_stored=False,
-                    dimension_separator="/",
+                    **zarr_kwargs,
                 )
             data = dask.array.from_zarr(cache_store, component=path)
     else:
@@ -223,7 +238,7 @@ def remove_from_cache_store(sig_id, frame): # noqa: ARG001
             overwrite=False,
             compute=True,
             return_stored=False,
-            dimension_separator="/",
+            **zarr_kwargs,
         )
         data = dask.array.from_zarr(cache_store, component=path)
