Commit 56c4bf8

Set chunked_array_type in .chunk call
1 parent b7a3313 commit 56c4bf8

File tree

1 file changed: +10 -9 lines changed


examples/virtual-rechunk/virtual-rechunk.py

Lines changed: 10 additions & 9 deletions
@@ -42,9 +42,9 @@ def reduce_references(results):
     """
     combined_vds = xr.combine_nested(
         results,
-        concat_dim=['Time'],
-        coords='minimal',
-        compat='override',
+        concat_dim=["Time"],
+        coords="minimal",
+        compat="override",
     )
     # possibly write parquet to s3 here
     return combined_vds
@@ -60,24 +60,25 @@ def reduce_references(results):
 )
 
 ds = futures.get_result()
-ds.virtualize.to_kerchunk('combined.json', format='json')
+ds.virtualize.to_kerchunk("combined.json", format="json")
 
 # NOTE: In jupyter, open_dataset seems to cache the json, such that changes
 # aren't propogated until the kernel is restarted.
-combined_ds = xr.open_dataset('combined.json',
+combined_ds = xr.open_dataset("combined.json",
                               engine="kerchunk",
                               chunks={},
-                              chunked_array_type='cubed',
+                              chunked_array_type="cubed",
                               )
 
 combined_ds['Time'].attrs = {}  # to_zarr complains about attrs
 
 rechunked_ds = combined_ds.chunk(
-    chunks={'Time': 5, 'south_north': 25, 'west_east': 32}
+    chunks={'Time': 5, 'south_north': 25, 'west_east': 32},
+    chunked_array_type="cubed",
 )
 
-rechunked_ds.to_zarr('rechunked.zarr',
-                     mode='w',
+rechunked_ds.to_zarr("rechunked.zarr",
+                     mode="w",
                      encoding={},  # TODO
                      consolidated=True,
                      safe_chunks=False,
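For context: xarray's .chunk() dispatches to a registered chunk manager, and without an explicit chunked_array_type it falls back to its default (dask, when installed), so the rechunk step would not stay on cubed arrays. Below is a minimal sketch of the pattern this commit adopts, not part of the commit itself; it assumes cubed and cubed-xarray are installed and uses a small synthetic dataset (the variable name "T2" is made up) instead of the kerchunk reference file.

    import numpy as np
    import xarray as xr

    # Synthetic stand-in for the combined dataset; dimension names follow the
    # example above, the variable name is hypothetical.
    ds = xr.Dataset(
        {"T2": (("Time", "south_north", "west_east"), np.zeros((10, 50, 64)))}
    )

    # Request cubed-backed chunks explicitly; this relies on the "cubed" chunk
    # manager registered by the cubed-xarray package.
    rechunked = ds.chunk(
        chunks={"Time": 5, "south_north": 25, "west_east": 32},
        chunked_array_type="cubed",
    )

    # Should report a cubed array type rather than a dask array.
    print(type(rechunked["T2"].data))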

0 commit comments
