Commit a765ae0

Upgrade ruff to 0.8.0 (#9816)
1 parent 552a74b commit a765ae0

File tree: 14 files changed, +70 −83 lines

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ repos:
       - id: text-unicode-replacement-char
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.7.2
+    rev: v0.8.0
     hooks:
       - id: ruff-format
       - id: ruff

asv_bench/benchmarks/dataset_io.py

Lines changed: 1 addition & 1 deletion
@@ -305,7 +305,7 @@ def make_ds(self, nfiles=10):
             ds.attrs = {"history": "created for xarray benchmarking"}

             self.ds_list.append(ds)
-            self.filenames_list.append("test_netcdf_%i.nc" % i)
+            self.filenames_list.append(f"test_netcdf_{i}.nc")


 class IOWriteMultipleNetCDF3(IOMultipleNetCDF):
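Several hunks in this commit, starting with the one above, replace printf-style `%` formatting with f-strings. A minimal sketch of the equivalence, assuming the change is driven by ruff's pyupgrade rule UP031 (printf-string-formatting); the variable below is illustrative only:

i = 3

# old style: printf-style interpolation with the % operator
old = "test_netcdf_%i.nc" % i

# new style: an f-string produces the same result
new = f"test_netcdf_{i}.nc"

assert old == new == "test_netcdf_3.nc"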

pyproject.toml

Lines changed: 1 addition & 2 deletions
@@ -45,7 +45,7 @@ dev = [
   "pytest-env",
   "pytest-xdist",
   "pytest-timeout",
-  "ruff",
+  "ruff>=0.8.0",
   "sphinx",
   "sphinx_autosummary_accessors",
   "xarray[complete]",
@@ -256,7 +256,6 @@ ignore = [
   "E501", # line too long - let the formatter worry about that
   "E731", # do not assign a lambda expression, use a def
   "UP007", # use X | Y for type annotations
-  "UP027", # deprecated
   "C40", # unnecessary generator, comprehension, or literal
   "PIE790", # unnecessary pass statement
   "PERF203", # try-except within a loop incurs performance overhead

xarray/__init__.py

Lines changed: 3 additions & 3 deletions
@@ -64,7 +64,7 @@

 # A hardcoded __all__ variable is necessary to appease
 # `mypy --strict` running in projects that import xarray.
-__all__ = (
+__all__ = (  # noqa: RUF022
     # Sub-packages
     "groupers",
     "testing",
@@ -117,8 +117,8 @@
     "Context",
     "Coordinates",
     "DataArray",
-    "Dataset",
     "DataTree",
+    "Dataset",
     "Index",
     "IndexSelResult",
     "IndexVariable",
@@ -131,6 +131,6 @@
     "SerializationWarning",
     "TreeIsomorphismError",
     # Constants
-    "__version__",
     "ALL_DIMS",
+    "__version__",
 )
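The `# noqa: RUF022` and the swapped entries above are consistent with ruff 0.8 checking `__all__` ordering (rule RUF022, unsorted-dunder-all), which sorts by plain string comparison: uppercase letters come before lowercase letters and before the underscore, so "DataTree" precedes "Dataset" and "ALL_DIMS" precedes "__version__". The noqa presumably keeps the hand-grouped, commented sections from being reordered further. A quick check of that ordering (a sketch, not part of the commit):

names = ["Dataset", "DataTree", "__version__", "ALL_DIMS"]
print(sorted(names))  # ['ALL_DIMS', 'DataTree', 'Dataset', '__version__']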

xarray/coding/cftime_offsets.py

Lines changed: 1 addition & 2 deletions
@@ -1451,8 +1451,7 @@ def date_range_like(source, calendar, use_cftime=None):
     from xarray.core.dataarray import DataArray

     if not isinstance(source, pd.DatetimeIndex | CFTimeIndex) and (
-        isinstance(source, DataArray)
-        and (source.ndim != 1)
+        (isinstance(source, DataArray) and (source.ndim != 1))
         or not _contains_datetime_like_objects(source.variable)
     ):
         raise ValueError(
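This hunk and several below wrap mixed `and`/`or` chains in explicit parentheses. Behavior is unchanged, since `and` already binds more tightly than `or`; the parentheses only spell out that precedence (attributing this to a specific ruff rule such as RUF021, parenthesize-chained-operators, is an assumption). A minimal sketch with illustrative booleans:

a, b, c = True, False, True

# `and` binds tighter than `or`, so these two expressions are equivalent
implicit = a and b or c
explicit = (a and b) or c

assert implicit == explicit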

xarray/conventions.py

Lines changed: 2 additions & 5 deletions
@@ -726,11 +726,8 @@ def _encode_coordinates(
     )

     # if coordinates set to None, don't write coordinates attribute
-    if (
-        "coordinates" in attrs
-        and attrs.get("coordinates") is None
-        or "coordinates" in encoding
-        and encoding.get("coordinates") is None
+    if ("coordinates" in attrs and attrs.get("coordinates") is None) or (
+        "coordinates" in encoding and encoding.get("coordinates") is None
     ):
         # make sure "coordinates" is removed from attrs/encoding
         attrs.pop("coordinates", None)

xarray/core/dataset.py

Lines changed: 3 additions & 5 deletions
@@ -5401,11 +5401,9 @@ def _get_stack_index(
                 and var.dims[0] == dim
                 and (
                     # stack: must be a single coordinate index
-                    not multi
-                    and not self.xindexes.is_multi(name)
+                    (not multi and not self.xindexes.is_multi(name))
                     # unstack: must be an index that implements .unstack
-                    or multi
-                    and type(index).unstack is not Index.unstack
+                    or (multi and type(index).unstack is not Index.unstack)
                 )
             ):
                 if stack_index is not None and index is not stack_index:
@@ -7617,7 +7615,7 @@ def from_dataframe(cls, dataframe: pd.DataFrame, sparse: bool = False) -> Self:

         if isinstance(idx, pd.MultiIndex):
             dims = tuple(
-                name if name is not None else "level_%i" % n  # type: ignore[redundant-expr]
+                name if name is not None else f"level_{n}"  # type: ignore[redundant-expr]
                 for n, name in enumerate(idx.names)
             )
             for dim, lev in zip(dims, idx.levels, strict=True):

xarray/plot/utils.py

Lines changed: 3 additions & 4 deletions
@@ -869,11 +869,11 @@ def _infer_interval_breaks(coord, axis=0, scale=None, check_monotonic=False):
     if check_monotonic and not _is_monotonic(coord, axis=axis):
         raise ValueError(
             "The input coordinate is not sorted in increasing "
-            "order along axis %d. This can lead to unexpected "
+            f"order along axis {axis}. This can lead to unexpected "
             "results. Consider calling the `sortby` method on "
             "the input DataArray. To plot data with categorical "
             "axes, consider using the `heatmap` function from "
-            "the `seaborn` statistical plotting library." % axis
+            "the `seaborn` statistical plotting library."
         )

     # If logscale, compute the intervals in the logarithmic space
@@ -1708,8 +1708,7 @@ def _determine_guide(
     if (
         not add_colorbar
         and (hueplt_norm.data is not None and hueplt_norm.data_is_numeric is False)
-        or sizeplt_norm.data is not None
-    ):
+    ) or sizeplt_norm.data is not None:
         add_legend = True
     else:
         add_legend = False

xarray/testing/assertions.py

Lines changed: 4 additions & 4 deletions
@@ -124,8 +124,8 @@ def assert_equal(a, b, check_dim_order: bool = True):
     numpy.testing.assert_array_equal
     """
     __tracebackhide__ = True
-    assert (
-        type(a) is type(b) or isinstance(a, Coordinates) and isinstance(b, Coordinates)
+    assert type(a) is type(b) or (
+        isinstance(a, Coordinates) and isinstance(b, Coordinates)
     )
     b = maybe_transpose_dims(a, b, check_dim_order)
     if isinstance(a, Variable | DataArray):
@@ -163,8 +163,8 @@ def assert_identical(a, b):
     assert_equal, assert_allclose, Dataset.equals, DataArray.equals
     """
     __tracebackhide__ = True
-    assert (
-        type(a) is type(b) or isinstance(a, Coordinates) and isinstance(b, Coordinates)
+    assert type(a) is type(b) or (
+        isinstance(a, Coordinates) and isinstance(b, Coordinates)
     )
     if isinstance(a, Variable):
         assert a.identical(b), formatting.diff_array_repr(a, b, "identical")

xarray/tests/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -210,8 +210,7 @@ def __call__(self, dsk, keys, **kwargs):
         self.total_computes += 1
         if self.total_computes > self.max_computes:
             raise RuntimeError(
-                "Too many computes. Total: %d > max: %d."
-                % (self.total_computes, self.max_computes)
+                f"Too many computes. Total: {self.total_computes} > max: {self.max_computes}."
             )
         return dask.get(dsk, keys, **kwargs)
217216
