
Commit 6d4a292 (parent: 74dffff)

unify zarr chunking with other chunking in apiv2.open_dataset (#4667)
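Background for the diff below: before this commit, only the non-zarr branch derived a deterministic dask token from the file path, its modification time, and the open arguments; the zarr branch chunked variables without one. The unified code threads that token through per-variable chunking for every engine. A minimal sketch of the naming scheme, with a hypothetical stand-in for xarray's internal `_get_mtime` helper:

```python
import os
from dask.base import tokenize


def open_dataset_token(path, engine, chunks, **extra_tokens):
    # Hypothetical stand-in for xarray's internal _get_mtime: include the
    # file's modification time so the token changes whenever the file does.
    mtime = os.path.getmtime(path) if os.path.exists(path) else None
    # dask.base.tokenize hashes its arguments deterministically, so
    # reopening the same unchanged file with the same options produces the
    # same graph-name prefix and lets dask reuse identical task keys.
    token = tokenize(path, mtime, engine, chunks, **extra_tokens)
    return "open_dataset-%s" % token
```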

File tree: 1 file changed, +18 -32 lines

xarray/backends/apiv2.py (18 additions, 32 deletions)
@@ -29,38 +29,24 @@ def _chunk_ds(
     overwrite_encoded_chunks,
     **extra_tokens,
 ):
-    if engine != "zarr":
-        from dask.base import tokenize
-
-        mtime = _get_mtime(filename_or_obj)
-        token = tokenize(filename_or_obj, mtime, engine, chunks, **extra_tokens)
-        name_prefix = "open_dataset-%s" % token
-        ds = backend_ds.chunk(chunks, name_prefix=name_prefix, token=token)
-
-    else:
-
-        if chunks == "auto":
-            try:
-                import dask.array  # noqa
-            except ImportError:
-                chunks = None
-
-        if chunks is None:
-            return backend_ds
-
-        if isinstance(chunks, int):
-            chunks = dict.fromkeys(backend_ds.dims, chunks)
-
-        variables = {}
-        for k, v in backend_ds.variables.items():
-            var_chunks = _get_chunk(v, chunks)
-            variables[k] = _maybe_chunk(
-                k,
-                v,
-                var_chunks,
-                overwrite_encoded_chunks=overwrite_encoded_chunks,
-            )
-        ds = backend_ds._replace(variables)
+    from dask.base import tokenize
+
+    mtime = _get_mtime(filename_or_obj)
+    token = tokenize(filename_or_obj, mtime, engine, chunks, **extra_tokens)
+    name_prefix = "open_dataset-%s" % token
+
+    variables = {}
+    for name, var in backend_ds.variables.items():
+        var_chunks = _get_chunk(var, chunks)
+        variables[name] = _maybe_chunk(
+            name,
+            var,
+            var_chunks,
+            overwrite_encoded_chunks=overwrite_encoded_chunks,
+            name_prefix=name_prefix,
+            token=token,
+        )
+    ds = backend_ds._replace(variables)
     return ds
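After this change every engine funnels through the same per-variable loop, where `_get_chunk` resolves the requested chunk layout for one variable and `_maybe_chunk` wraps it in a dask array named from the token. Those helpers are xarray-internal; as an illustration of the idea only, here is a rough standalone analogue (names and simplifications are mine, not xarray's):

```python
import numpy as np
import dask.array as da


def chunk_variable_sketch(name, values, dims, chunks, token):
    """Rough analogue of the per-variable path in the diff above:
    resolve the requested chunk mapping against this variable's
    dimensions, then wrap the data in a dask array whose graph name
    embeds the deterministic token, so reopening an unchanged file
    reuses the same dask keys."""
    # Dimensions absent from the mapping keep their full extent.
    var_chunks = tuple(
        chunks.get(dim, size) for dim, size in zip(dims, values.shape)
    )
    dask_name = f"open_dataset-{token}-{name}"
    return da.from_array(values, chunks=var_chunks, name=dask_name)


# e.g. chunk a (time, x) variable along time only:
arr = chunk_variable_sketch(
    "temperature", np.zeros((10, 4)), ("time", "x"), {"time": 5}, token="abc123"
)
assert arr.chunks == ((5, 5), (4,))
```

From the user's side, the practical effect of the unification is that `chunks=` passed to `apiv2.open_dataset` is handled the same way for zarr as for every other engine, including the deterministic dask graph naming.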