
Commit 350b0b4

Format with preview flag
1 parent e3383b6 commit 350b0b4


68 files changed: +1571 −1949 lines
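
The diffs below result from enabling ruff's preview formatting style (see the pyproject.toml hunk further down). The most common rewrite is that a dict or list literal passed as the sole argument of a call is "hugged" against the call's parentheses instead of being split onto its own indented block. A minimal sketch of the two layouts, using made-up toy data rather than code from this commit:

# Toy data; nothing here is taken from the commit itself.
import numpy as np
import xarray as xr

t = np.arange(10)
data = np.random.randn(10)

# Stable style: the sole dict argument gets its own indented block.
ds_stable = xr.Dataset(
    {"A": xr.DataArray(data, coords={"T": t}, dims="T")}
)

# Preview style: the braces "hug" the call's parentheses.
ds_preview = xr.Dataset({
    "A": xr.DataArray(data, coords={"T": t}, dims="T")
})

Both forms are equivalent at runtime; only the layout the formatter emits changes.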

asv_bench/benchmarks/combine.py

Lines changed: 15 additions & 15 deletions
@@ -16,9 +16,9 @@ def setup(self) -> None:
         data = np.random.randn(t_size)

         self.dsA0 = xr.Dataset({"A": xr.DataArray(data, coords={"T": t}, dims=("T"))})
-        self.dsA1 = xr.Dataset(
-            {"A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T"))}
-        )
+        self.dsA1 = xr.Dataset({
+            "A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T"))
+        })

     def time_combine_by_coords(self) -> None:
         """Also has to load and arrange t coordinate"""
@@ -54,18 +54,18 @@ def setup(self):
         t = np.arange(t_size)
         data = np.random.randn(t_size, x_size, y_size)

-        self.dsA0 = xr.Dataset(
-            {"A": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))}
-        )
-        self.dsA1 = xr.Dataset(
-            {"A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))}
-        )
-        self.dsB0 = xr.Dataset(
-            {"B": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))}
-        )
-        self.dsB1 = xr.Dataset(
-            {"B": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))}
-        )
+        self.dsA0 = xr.Dataset({
+            "A": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))
+        })
+        self.dsA1 = xr.Dataset({
+            "A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))
+        })
+        self.dsB0 = xr.Dataset({
+            "B": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))
+        })
+        self.dsB1 = xr.Dataset({
+            "B": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))
+        })

     def time_combine_nested(self):
         datasets = [[self.dsA0, self.dsA1], [self.dsB0, self.dsB1]]

asv_bench/benchmarks/dataset.py

Lines changed: 4 additions & 6 deletions
@@ -7,12 +7,10 @@

 class DatasetBinaryOp:
     def setup(self):
-        self.ds = Dataset(
-            {
-                "a": (("x", "y"), np.ones((300, 400))),
-                "b": (("x", "y"), np.ones((300, 400))),
-            }
-        )
+        self.ds = Dataset({
+            "a": (("x", "y"), np.ones((300, 400))),
+            "b": (("x", "y"), np.ones((300, 400))),
+        })
         self.mean = self.ds.mean()
         self.std = self.ds.std()


asv_bench/benchmarks/groupby.py

Lines changed: 5 additions & 7 deletions
@@ -11,13 +11,11 @@
 class GroupBy:
     def setup(self, *args, **kwargs):
         self.n = 100
-        self.ds1d = xr.Dataset(
-            {
-                "a": xr.DataArray(np.r_[np.repeat(1, self.n), np.repeat(2, self.n)]),
-                "b": xr.DataArray(np.arange(2 * self.n)),
-                "c": xr.DataArray(np.arange(2 * self.n)),
-            }
-        )
+        self.ds1d = xr.Dataset({
+            "a": xr.DataArray(np.r_[np.repeat(1, self.n), np.repeat(2, self.n)]),
+            "b": xr.DataArray(np.arange(2 * self.n)),
+            "c": xr.DataArray(np.arange(2 * self.n)),
+        })
         self.ds2d = self.ds1d.expand_dims(z=10).copy()
         self.ds1d_mean = self.ds1d.groupby("b").mean()
         self.ds2d_mean = self.ds2d.groupby("b").mean()

asv_bench/benchmarks/pandas.py

Lines changed: 5 additions & 7 deletions
@@ -9,13 +9,11 @@
 class MultiIndexSeries:
     def setup(self, dtype, subset):
         data = np.random.rand(100000).astype(dtype)
-        index = pd.MultiIndex.from_product(
-            [
-                list("abcdefhijk"),
-                list("abcdefhijk"),
-                pd.date_range(start="2000-01-01", periods=1000, freq="D"),
-            ]
-        )
+        index = pd.MultiIndex.from_product([
+            list("abcdefhijk"),
+            list("abcdefhijk"),
+            pd.date_range(start="2000-01-01", periods=1000, freq="D"),
+        ])
         series = pd.Series(data, index)
         if subset:
             series = series[::3]

ci/min_deps_check.py

Lines changed: 11 additions & 12 deletions
@@ -3,6 +3,7 @@
 publication date. Compare it against requirements/min-all-deps.yml to verify the
 policy on obsolete dependencies is being followed. Print a pretty report :)
 """
+
 from __future__ import annotations

 import itertools
@@ -104,18 +105,16 @@ def metadata(entry):

     # Hardcoded fix to work around incorrect dates in conda
     if pkg == "python":
-        out.update(
-            {
-                (2, 7): datetime(2010, 6, 3),
-                (3, 5): datetime(2015, 9, 13),
-                (3, 6): datetime(2016, 12, 23),
-                (3, 7): datetime(2018, 6, 27),
-                (3, 8): datetime(2019, 10, 14),
-                (3, 9): datetime(2020, 10, 5),
-                (3, 10): datetime(2021, 10, 4),
-                (3, 11): datetime(2022, 10, 24),
-            }
-        )
+        out.update({
+            (2, 7): datetime(2010, 6, 3),
+            (3, 5): datetime(2015, 9, 13),
+            (3, 6): datetime(2016, 12, 23),
+            (3, 7): datetime(2018, 6, 27),
+            (3, 8): datetime(2019, 10, 14),
+            (3, 9): datetime(2020, 10, 5),
+            (3, 10): datetime(2021, 10, 4),
+            (3, 11): datetime(2022, 10, 24),
+        })

     return out


doc/conf.py

Lines changed: 3 additions & 5 deletions
@@ -52,11 +52,9 @@
 try:
     import cartopy  # noqa: F401
 except ImportError:
-    allowed_failures.update(
-        [
-            "gallery/plot_cartopy_facetgrid.py",
-        ]
-    )
+    allowed_failures.update([
+        "gallery/plot_cartopy_facetgrid.py",
+    ])

 nbsphinx_allow_errors = False


doc/examples/apply_ufunc_vectorize_1d.ipynb

Lines changed: 8 additions & 6 deletions
@@ -520,9 +520,10 @@
 "\n",
 "interped = xr.apply_ufunc(\n",
 "    interp1d_np,  # first the function\n",
-"    air.chunk(\n",
-"        {\"time\": 2, \"lon\": 2}\n",
-"    ),  # now arguments in the order expected by 'interp1_np'\n",
+"    air.chunk({\n",
+"        \"time\": 2,\n",
+"        \"lon\": 2,\n",
+"    }),  # now arguments in the order expected by 'interp1_np'\n",
 "    air.lat,  # as above\n",
 "    newlat,  # as above\n",
 "    input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]],  # list with one entry per arg\n",
@@ -617,9 +618,10 @@
 "source": [
 "interped = xr.apply_ufunc(\n",
 "    interp1d_np_gufunc,  # first the function\n",
-"    air.chunk(\n",
-"        {\"time\": 2, \"lon\": 2}\n",
-"    ),  # now arguments in the order expected by 'interp1_np'\n",
+"    air.chunk({\n",
+"        \"time\": 2,\n",
+"        \"lon\": 2,\n",
+"    }),  # now arguments in the order expected by 'interp1_np'\n",
 "    air.lat,  # as above\n",
 "    newlat,  # as above\n",
 "    input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]],  # list with one entry per arg\n",

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -288,6 +288,7 @@ convention = "numpy"

 [tool.ruff.format]
 docstring-code-format = true
+preview = true

 [tool.pytest.ini_options]
 addopts = ["--strict-config", "--strict-markers"]
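
With preview = true in place, re-running the formatter picks the new style up from pyproject.toml automatically. A rough sketch of a local invocation, assuming ruff is installed on PATH (xarray's actual workflow may drive this through pre-commit instead):

# Hedged sketch: re-apply the formatter after enabling the preview flag.
import subprocess

# `ruff format` reads [tool.ruff.format] from pyproject.toml, so the
# preview = true setting above is honored without extra flags.
subprocess.run(["ruff", "format", "."], check=True)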

xarray/backends/api.py

Lines changed: 26 additions & 37 deletions
@@ -1132,8 +1132,7 @@ def to_netcdf(
     *,
     multifile: Literal[True],
     invalid_netcdf: bool = False,
-) -> tuple[ArrayWriter, AbstractDataStore]:
-    ...
+) -> tuple[ArrayWriter, AbstractDataStore]: ...


 # path=None writes to bytes
@@ -1150,8 +1149,7 @@ def to_netcdf(
     compute: bool = True,
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> bytes:
-    ...
+) -> bytes: ...


 # compute=False returns dask.Delayed
@@ -1169,8 +1167,7 @@ def to_netcdf(
     compute: Literal[False],
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> Delayed:
-    ...
+) -> Delayed: ...


 # default return None
@@ -1187,8 +1184,7 @@ def to_netcdf(
     compute: Literal[True] = True,
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> None:
-    ...
+) -> None: ...


 # if compute cannot be evaluated at type check time
@@ -1206,8 +1202,7 @@ def to_netcdf(
     compute: bool = False,
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> Delayed | None:
-    ...
+) -> Delayed | None: ...


 # if multifile cannot be evaluated at type check time
@@ -1225,8 +1220,7 @@ def to_netcdf(
     compute: bool = False,
     multifile: bool = False,
     invalid_netcdf: bool = False,
-) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None:
-    ...
+) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None: ...


 # Any
@@ -1243,8 +1237,7 @@ def to_netcdf(
     compute: bool = False,
     multifile: bool = False,
     invalid_netcdf: bool = False,
-) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None:
-    ...
+) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None: ...


 def to_netcdf(
@@ -1499,22 +1492,20 @@ def save_mfdataset(
             "save_mfdataset"
         )

-    writers, stores = zip(
-        *[
-            to_netcdf(
-                ds,
-                path,
-                mode,
-                format,
-                group,
-                engine,
-                compute=compute,
-                multifile=True,
-                **kwargs,
-            )
-            for ds, path, group in zip(datasets, paths, groups)
-        ]
-    )
+    writers, stores = zip(*[
+        to_netcdf(
+            ds,
+            path,
+            mode,
+            format,
+            group,
+            engine,
+            compute=compute,
+            multifile=True,
+            **kwargs,
+        )
+        for ds, path, group in zip(datasets, paths, groups)
+    ])

     try:
         writes = [w.sync(compute=compute) for w in writers]
@@ -1526,9 +1517,9 @@ def save_mfdataset(
     if not compute:
         import dask

-        return dask.delayed(
-            [dask.delayed(_finalize_store)(w, s) for w, s in zip(writes, stores)]
-        )
+        return dask.delayed([
+            dask.delayed(_finalize_store)(w, s) for w, s in zip(writes, stores)
+        ])


 def _auto_detect_region(ds_new, ds_orig, dim):
@@ -1678,8 +1669,7 @@ def to_zarr(
     zarr_version: int | None = None,
     write_empty_chunks: bool | None = None,
     chunkmanager_store_kwargs: dict[str, Any] | None = None,
-) -> backends.ZarrStore:
-    ...
+) -> backends.ZarrStore: ...


 # compute=False returns dask.Delayed
@@ -1702,8 +1692,7 @@ def to_zarr(
     zarr_version: int | None = None,
     write_empty_chunks: bool | None = None,
     chunkmanager_store_kwargs: dict[str, Any] | None = None,
-) -> Delayed:
-    ...
+) -> Delayed: ...


 def to_zarr(
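
The to_netcdf and to_zarr hunks above show the other notable preview-style rewrite: the `...` body of an @overload stub is collapsed onto the signature line. A toy sketch of the pattern, using a hypothetical function that is not part of xarray's API:

from typing import Literal, overload


@overload
def to_bytes(data: str, *, encode: Literal[True]) -> bytes: ...
@overload
def to_bytes(data: str, *, encode: Literal[False]) -> str: ...


def to_bytes(data: str, *, encode: bool) -> bytes | str:
    # Hypothetical implementation, used only to illustrate the stub layout.
    return data.encode() if encode else data

The collapsed form is purely a layout change; type checkers treat both spellings of the stub identically.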

xarray/backends/locks.py

Lines changed: 3 additions & 3 deletions
@@ -40,9 +40,9 @@ class SerializableLock:
     The creation of locks is itself not threadsafe.
     """

-    _locks: ClassVar[
-        WeakValueDictionary[Hashable, threading.Lock]
-    ] = WeakValueDictionary()
+    _locks: ClassVar[WeakValueDictionary[Hashable, threading.Lock]] = (
+        WeakValueDictionary()
+    )
     token: Hashable
     lock: threading.Lock

