Skip to content

Commit e7ebd35

Browse files
committed
ENH: Add tensorstore write support
1 parent 81c9275 commit e7ebd35

File tree

7 files changed

+220
-27
lines changed

7 files changed

+220
-27
lines changed

.github/workflows/pixi-test.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ jobs:
88
strategy:
99
max-parallel: 5
1010
matrix:
11-
os: [ubuntu-22.04, windows-2022]
11+
os: [ubuntu-22.04, macos-13]
1212

1313
steps:
1414
- uses: actions/checkout@v4

.github/workflows/test.yml

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ jobs:
88
strategy:
99
max-parallel: 5
1010
matrix:
11-
os: [ubuntu-22.04, windows-2022, macos-12, macos-14]
11+
os: [ubuntu-22.04, windows-2022, macos-13]
1212
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
1313

1414
steps:
@@ -24,8 +24,15 @@ jobs:
2424
python -m pip install --upgrade pip
2525
python -m pip install -e ".[test,dask-image,itk,cli,validate]"
2626
27+
- name: Install tensorstore
28+
if:
29+
${{ matrix.python-version != '3.13' && matrix.os != 'macos-12' &&
30+
matrix.os != 'windows-2022' }}
31+
run: |
32+
python -m pip install --upgrade pip
33+
python -m pip install -e ".[tensorstore]"
34+
2735
- name: Test with pytest
28-
if: ${{ matrix.os != 'ubuntu-22.04' && matrix.os != 'macos-14' }}
2936
run: |
3037
pytest --junitxml=junit/test-results.xml
3138

docs/installation.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,3 +30,7 @@ Optional dependencies include:
3030
`dask-image` : Multiscale generation with `dask-image` methods.
3131

3232
`itk` : Multiscale generation with `itk` methods.
33+
34+
`tensorstore` : Support for writing with the `tensorstore` backend.
35+
36+
`validate` : Support for metadata validation when reading.

ngff_zarr/to_ngff_zarr.py

Lines changed: 70 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -59,10 +59,32 @@ def _prep_for_to_zarr(
5959
)
6060

6161

62+
def _write_with_tensorstore(store_path: str, array, region, chunks, **kwargs) -> None:
    """Write an array to a zarr store at *store_path* using the tensorstore backend.

    :param store_path: Filesystem path of the zarr array to create.
    :param array: Array-like (e.g. dask or numpy) exposing ``.shape`` and ``.dtype``;
        the selected data is materialized by tensorstore on assignment.
    :param region: Tuple of slices selecting the portion of *array* to write.
    :param chunks: Chunk shape for the created zarr array, one int per dimension.
    :param **kwargs: Accepted (and currently ignored) so callers can pass the same
        keyword arguments they would pass to the ``dask.array.to_zarr`` code path
        without raising ``TypeError``.
    """
    # Imported lazily: tensorstore is an optional dependency.
    import tensorstore as ts

    spec = {
        "driver": "zarr",
        "kvstore": {
            "driver": "file",
            "path": store_path,
        },
        "metadata": {
            "chunks": chunks,
            "shape": array.shape,
            "dtype": array.dtype.str,
            # Match the "/" separator used by the dask.array.to_zarr code path.
            "dimension_separator": "/",
        },
    }
    # NOTE(review): create=True errors if the array already exists, e.g. when a
    # second region of the same scale is written — confirm single-write usage
    # or fall back to open_mode="open_or_create".
    dataset = ts.open(spec, create=True, dtype=array.dtype).result()
    # NOTE(review): this assigns array[region] to the ENTIRE dataset; for
    # partial/region writes `dataset[region] = ...` is likely intended — confirm
    # that callers always pass a region covering the full array shape.
    dataset[...] = array[region]
81+
82+
6283
def to_ngff_zarr(
6384
store: Union[MutableMapping, str, Path, BaseStore],
6485
multiscales: Multiscales,
6586
overwrite: bool = True,
87+
use_tensorstore: bool = False,
6688
chunk_store: Optional[Union[MutableMapping, str, Path, BaseStore]] = None,
6789
progress: Optional[Union[NgffProgress, NgffProgressCallback]] = None,
6890
**kwargs,
@@ -79,6 +101,9 @@ def to_ngff_zarr(
79101
:param overwrite: If True, delete any pre-existing data in `store` before creating groups.
80102
:type overwrite: bool, optional
81103
104+
:param use_tensorstore: If True, write array using tensorstore backend.
105+
:type use_tensorstore: bool, optional
106+
82107
:param chunk_store: Separate storage for chunks. If not provided, `store` will be used
83108
for storage of both chunks and metadata.
84109
:type chunk_store: MutableMapping, str or Path, zarr.storage.BaseStore, optional
@@ -89,6 +114,12 @@ def to_ngff_zarr(
89114
:param **kwargs: Passed to the zarr.creation.create() function, e.g., compression options.
90115
"""
91116

117+
if use_tensorstore:
118+
if isinstance(store, (str, Path)):
119+
store_path = str(store)
120+
else:
121+
raise ValueError("Tensorstore requires a path-like store")
122+
92123
metadata_dict = asdict(multiscales.metadata)
93124
metadata_dict = _pop_metadata_optionals(metadata_dict)
94125
metadata_dict["@type"] = "ngff:Image"
@@ -262,33 +293,50 @@ def to_ngff_zarr(
262293
arr_region.chunks,
263294
meta=arr_region,
264295
)
265-
dask.array.to_zarr(
266-
optimized,
267-
zarr_array,
268-
region=region,
269-
component=path,
270-
overwrite=False,
271-
compute=True,
272-
return_stored=False,
273-
dimension_separator="/",
274-
**kwargs,
275-
)
296+
if use_tensorstore:
297+
scale_path = f"{store_path}/{path}"
298+
_write_with_tensorstore(
299+
scale_path,
300+
optimized,
301+
region,
302+
[c[0] for c in arr_region.chunks],
303+
**kwargs,
304+
)
305+
else:
306+
dask.array.to_zarr(
307+
optimized,
308+
zarr_array,
309+
region=region,
310+
component=path,
311+
overwrite=False,
312+
compute=True,
313+
return_stored=False,
314+
dimension_separator="/",
315+
**kwargs,
316+
)
276317
else:
277318
if isinstance(progress, NgffProgressCallback):
278319
progress.add_callback_task(
279320
f"[green]Writing scale {index+1} of {nscales}"
280321
)
281-
arr = _prep_for_to_zarr(store, arr)
282-
dask.array.to_zarr(
283-
arr,
284-
store,
285-
component=path,
286-
overwrite=False,
287-
compute=True,
288-
return_stored=False,
289-
dimension_separator="/",
290-
**kwargs,
291-
)
322+
if use_tensorstore:
323+
scale_path = f"{store_path}/{path}"
324+
region = tuple([slice(arr.shape[i]) for i in range(arr.ndim)])
325+
_write_with_tensorstore(
326+
scale_path, arr, region, [c[0] for c in arr.chunks], **kwargs
327+
)
328+
else:
329+
arr = _prep_for_to_zarr(store, arr)
330+
dask.array.to_zarr(
331+
arr,
332+
store,
333+
component=path,
334+
overwrite=False,
335+
compute=True,
336+
return_stored=False,
337+
dimension_separator="/",
338+
**kwargs,
339+
)
292340

293341
# Minimize task graph depth
294342
if (

0 commit comments

Comments
 (0)