Skip to content

Commit 6baceca

Browse files
authored
Repo checker (#9450)
* Remove default mypy option
* Implement mypy ignore-without-code option
* Enable mypy redundant-expr
* Fix erroneous tuple types
* Remove ruff target-version, redundant with project
* Use extend selections for ruff
* Fix B009 and B010 with ruff
* Fix test parametrization
* Fix FutureWarning
* Make zips strict unless it is causing errors, in which case set them to explicit False
* Add a commit message for pre-commit autoupdate
1 parent cc74d3a commit 6baceca

Some content is hidden

Large commits have some of their content hidden by default. Use the search box below to find content that may be hidden.

78 files changed

+421
-313
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# https://pre-commit.com/
22
ci:
33
autoupdate_schedule: monthly
4+
autoupdate_commit_msg: 'Update pre-commit hooks'
45
exclude: 'xarray/datatree_.*'
56
repos:
67
- repo: https://github.com/pre-commit/pre-commit-hooks

asv_bench/benchmarks/dataset_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -724,7 +724,7 @@ class PerformanceBackend(xr.backends.BackendEntrypoint):
724724
def open_dataset(
725725
self,
726726
filename_or_obj: str | os.PathLike | None,
727-
drop_variables: tuple[str] = None,
727+
drop_variables: tuple[str, ...] = None,
728728
*,
729729
mask_and_scale=True,
730730
decode_times=True,

asv_bench/benchmarks/groupby.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,7 +174,7 @@ def setup(self, use_cftime, use_flox):
174174
# GH9426 - deep-copying CFTime object arrays is weirdly slow
175175
asda = xr.DataArray(time)
176176
labeled_time = []
177-
for year, month in zip(asda.dt.year, asda.dt.month):
177+
for year, month in zip(asda.dt.year, asda.dt.month, strict=True):
178178
labeled_time.append(cftime.datetime(year, month, 1))
179179

180180
self.da = xr.DataArray(

asv_bench/benchmarks/rolling.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def time_rolling_long(self, func, pandas, use_bottleneck):
6464
def time_rolling_np(self, window_, min_periods, use_bottleneck):
6565
with xr.set_options(use_bottleneck=use_bottleneck):
6666
self.ds.rolling(x=window_, center=False, min_periods=min_periods).reduce(
67-
getattr(np, "nansum")
67+
np.nansum
6868
).load()
6969

7070
@parameterized(

doc/user-guide/testing.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@ different type:
193193

194194
.. ipython:: python
195195
196-
def sparse_random_arrays(shape: tuple[int]) -> sparse._coo.core.COO:
196+
def sparse_random_arrays(shape: tuple[int, ...]) -> sparse._coo.core.COO:
197197
"""Strategy which generates random sparse.COO arrays"""
198198
if shape is None:
199199
shape = npst.array_shapes()

properties/test_pandas_roundtrip.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ def test_roundtrip_dataarray(data, arr) -> None:
8080
tuple
8181
)
8282
)
83-
coords = {name: np.arange(n) for (name, n) in zip(names, arr.shape)}
83+
coords = {name: np.arange(n) for (name, n) in zip(names, arr.shape, strict=True)}
8484
original = xr.DataArray(arr, dims=names, coords=coords)
8585
roundtripped = xr.DataArray(original.to_pandas())
8686
xr.testing.assert_identical(original, roundtripped)

pyproject.toml

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -84,14 +84,13 @@ source = ["xarray"]
8484
exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
8585

8686
[tool.mypy]
87-
enable_error_code = "redundant-self"
87+
enable_error_code = ["ignore-without-code", "redundant-self", "redundant-expr"]
8888
exclude = [
8989
'build',
9090
'xarray/util/generate_.*\.py',
9191
'xarray/datatree_/doc/.*\.py',
9292
]
9393
files = "xarray"
94-
show_error_codes = true
9594
show_error_context = true
9695
warn_redundant_casts = true
9796
warn_unused_configs = true
@@ -240,7 +239,6 @@ extend-exclude = [
240239
"doc",
241240
"_typed_ops.pyi",
242241
]
243-
target-version = "py310"
244242

245243
[tool.ruff.lint]
246244
# E402: module level import not at top of file
@@ -249,13 +247,13 @@ target-version = "py310"
249247
extend-safe-fixes = [
250248
"TID252", # absolute imports
251249
]
252-
ignore = [
250+
extend-ignore = [
253251
"E402",
254252
"E501",
255253
"E731",
256254
"UP007",
257255
]
258-
select = [
256+
extend-select = [
259257
"F", # Pyflakes
260258
"E", # Pycodestyle
261259
"W",

xarray/backends/api.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@
5151
try:
5252
from dask.delayed import Delayed
5353
except ImportError:
54-
Delayed = None # type: ignore
54+
Delayed = None # type: ignore[assignment, misc]
5555
from io import BufferedIOBase
5656

5757
from xarray.backends.common import BackendEntrypoint
@@ -1113,7 +1113,7 @@ def open_mfdataset(
11131113
list(combined_ids_paths.keys()),
11141114
list(combined_ids_paths.values()),
11151115
)
1116-
elif combine == "by_coords" and concat_dim is not None:
1116+
elif concat_dim is not None:
11171117
raise ValueError(
11181118
"When combine='by_coords', passing a value for `concat_dim` has no "
11191119
"effect. To manually combine along a specific dimension you should "
@@ -1432,7 +1432,7 @@ def to_netcdf(
14321432
store.sync()
14331433
return target.getvalue()
14341434
finally:
1435-
if not multifile and compute:
1435+
if not multifile and compute: # type: ignore[redundant-expr]
14361436
store.close()
14371437

14381438
if not compute:
@@ -1585,8 +1585,9 @@ def save_mfdataset(
15851585
multifile=True,
15861586
**kwargs,
15871587
)
1588-
for ds, path, group in zip(datasets, paths, groups)
1589-
]
1588+
for ds, path, group in zip(datasets, paths, groups, strict=True)
1589+
],
1590+
strict=True,
15901591
)
15911592

15921593
try:
@@ -1600,7 +1601,10 @@ def save_mfdataset(
16001601
import dask
16011602

16021603
return dask.delayed(
1603-
[dask.delayed(_finalize_store)(w, s) for w, s in zip(writes, stores)]
1604+
[
1605+
dask.delayed(_finalize_store)(w, s)
1606+
for w, s in zip(writes, stores, strict=True)
1607+
]
16041608
)
16051609

16061610

xarray/backends/common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -431,7 +431,7 @@ def set_dimensions(self, variables, unlimited_dims=None):
431431
for v in unlimited_dims: # put unlimited_dims first
432432
dims[v] = None
433433
for v in variables.values():
434-
dims.update(dict(zip(v.dims, v.shape)))
434+
dims.update(dict(zip(v.dims, v.shape, strict=True)))
435435

436436
for dim, length in dims.items():
437437
if dim in existing_dims and length != existing_dims[dim]:

xarray/backends/file_manager.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -276,7 +276,7 @@ def __getstate__(self):
276276
def __setstate__(self, state) -> None:
277277
"""Restore from a pickle."""
278278
opener, args, mode, kwargs, lock, manager_id = state
279-
self.__init__( # type: ignore
279+
self.__init__( # type: ignore[misc]
280280
opener, *args, mode=mode, kwargs=kwargs, lock=lock, manager_id=manager_id
281281
)
282282

0 commit comments

Comments
 (0)