@@ -59,10 +59,32 @@ def _prep_for_to_zarr(
5959 )
6060
6161
def _write_with_tensorstore(store_path: str, array, region, chunks, **kwargs) -> None:
    """Write one scale's array to a zarr store via the tensorstore backend.

    :param store_path: Filesystem path of the zarr array to create or open.
    :param array: Array-like exposing ``.shape`` and ``.dtype`` (e.g. a dask
        or numpy array) holding the data for this scale.
    :param region: Tuple of slices selecting the portion of ``array`` to write.
    :param chunks: Chunk shape for the zarr array, one int per dimension.
    :param **kwargs: Passed through to ``ts.open()``, e.g. compression options.
    """
    # Imported lazily so tensorstore remains an optional dependency.
    import tensorstore as ts

    spec = {
        "driver": "zarr",
        "kvstore": {
            "driver": "file",
            "path": store_path,
        },
        "metadata": {
            "chunks": chunks,
            "shape": array.shape,
            "dtype": array.dtype.str,  # NumPy typestr form, e.g. "<f4"
            "dimension_separator": "/",
        },
    }
    # open=True in addition to create=True: the caller invokes this helper
    # once per region of the same scale path, and with the default open=False
    # tensorstore raises ALREADY_EXISTS on every call after the first one has
    # created the array. open=True opens the existing array instead.
    dataset = ts.open(spec, create=True, open=True, dtype=array.dtype, **kwargs).result()
    # NOTE(review): assigning the whole dataset from array[region] is only
    # shape-consistent when ``region`` spans the full array — confirm intent
    # for partial-region writes (should this be dataset[region] = ...?).
    dataset[...] = array[region]
82+
6283def to_ngff_zarr (
6384 store : Union [MutableMapping , str , Path , BaseStore ],
6485 multiscales : Multiscales ,
6586 overwrite : bool = True ,
87+ use_tensorstore : bool = False ,
6688 chunk_store : Optional [Union [MutableMapping , str , Path , BaseStore ]] = None ,
6789 progress : Optional [Union [NgffProgress , NgffProgressCallback ]] = None ,
6890 ** kwargs ,
@@ -79,16 +101,25 @@ def to_ngff_zarr(
79101 :param overwrite: If True, delete any pre-existing data in `store` before creating groups.
80102 :type overwrite: bool, optional
81103
104+ :param use_tensorstore: If True, write array using tensorstore backend.
105+ :type use_tensorstore: bool, optional
106+
82107 :param chunk_store: Separate storage for chunks. If not provided, `store` will be used
83108 for storage of both chunks and metadata.
84109 :type chunk_store: MutableMapping, str or Path, zarr.storage.BaseStore, optional
85110
86111 :type progress: RichDaskProgress
87112 :param progress: Optional progress logger
88113
89- :param **kwargs: Passed to the zarr.creation.create() function, e.g., compression options.
114+ :param **kwargs: Passed to the zarr.creation.create() or ts.open() function, e.g., compression options.
90115 """
91116
117+ if use_tensorstore :
118+ if isinstance (store , (str , Path )):
119+ store_path = str (store )
120+ else :
121+ raise ValueError ("Tensorstore requires a path-like store" )
122+
92123 metadata_dict = asdict (multiscales .metadata )
93124 metadata_dict = _pop_metadata_optionals (metadata_dict )
94125 metadata_dict ["@type" ] = "ngff:Image"
@@ -262,33 +293,50 @@ def to_ngff_zarr(
262293 arr_region .chunks ,
263294 meta = arr_region ,
264295 )
265- dask .array .to_zarr (
266- optimized ,
267- zarr_array ,
268- region = region ,
269- component = path ,
270- overwrite = False ,
271- compute = True ,
272- return_stored = False ,
273- dimension_separator = "/" ,
274- ** kwargs ,
275- )
296+ if use_tensorstore :
297+ scale_path = f"{ store_path } /{ path } "
298+ _write_with_tensorstore (
299+ scale_path ,
300+ optimized ,
301+ region ,
302+ [c [0 ] for c in arr_region .chunks ],
303+ ** kwargs ,
304+ )
305+ else :
306+ dask .array .to_zarr (
307+ optimized ,
308+ zarr_array ,
309+ region = region ,
310+ component = path ,
311+ overwrite = False ,
312+ compute = True ,
313+ return_stored = False ,
314+ dimension_separator = "/" ,
315+ ** kwargs ,
316+ )
276317 else :
277318 if isinstance (progress , NgffProgressCallback ):
278319 progress .add_callback_task (
279320 f"[green]Writing scale { index + 1 } of { nscales } "
280321 )
281- arr = _prep_for_to_zarr (store , arr )
282- dask .array .to_zarr (
283- arr ,
284- store ,
285- component = path ,
286- overwrite = False ,
287- compute = True ,
288- return_stored = False ,
289- dimension_separator = "/" ,
290- ** kwargs ,
291- )
322+ if use_tensorstore :
323+ scale_path = f"{ store_path } /{ path } "
324+ region = tuple ([slice (arr .shape [i ]) for i in range (arr .ndim )])
325+ _write_with_tensorstore (
326+ scale_path , arr , region , [c [0 ] for c in arr .chunks ], ** kwargs
327+ )
328+ else :
329+ arr = _prep_for_to_zarr (store , arr )
330+ dask .array .to_zarr (
331+ arr ,
332+ store ,
333+ component = path ,
334+ overwrite = False ,
335+ compute = True ,
336+ return_stored = False ,
337+ dimension_separator = "/" ,
338+ ** kwargs ,
339+ )
292340
293341 # Minimize task graph depth
294342 if (
0 commit comments