
Commit 5515aec

Enforce ruff/Perflint rules (PERF) (#9730)
* Apply ruff/Perflint rule PERF102: when using only the values of a dict, use the `values()` method
* Apply ruff/Perflint rule PERF401: use a list comprehension to create a transformed list
* Apply ruff/Perflint rule PERF403: use a dictionary comprehension instead of a for-loop
* Enforce ruff/Perflint rules (PERF)
1 parent 88169c7 commit 5515aec

13 files changed: 55 additions & 57 deletions
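For context, the three Perflint rules named in the commit message rewrite common loop patterns. The following minimal sketch (made-up data, not code from this diff) shows the before/after shape of each rule:

# Illustration only: hypothetical data, not taken from the xarray sources below.
data = {"a": 1, "b": 2, "c": 3}

# PERF102 -- only the values (or only the keys) are used, so iterate
# .values() / .keys() instead of unpacking .items().
total = 0
for value in data.values():  # before: for _key, value in data.items():
    total += value

# PERF401 -- build a transformed list with a comprehension instead of
# appending inside a loop.
doubled = [v * 2 for v in data.values()]  # before: result = []; loop; result.append(v * 2)

# PERF403 -- build a dict with a comprehension instead of assigning
# keys inside a loop.
squared = {k: v * v for k, v in data.items()}  # before: squared = {}; loop; squared[k] = v * v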

pyproject.toml

Lines changed: 2 additions & 0 deletions
@@ -245,6 +245,7 @@ ignore = [
   "E501",
   "E731",
   "UP007",
+  "PERF20",
   "RUF001",
   "RUF002",
   "RUF003",
@@ -258,6 +259,7 @@ extend-select = [
   "W",
   "TID", # flake8-tidy-imports (absolute imports)
   "I", # isort
+  "PERF", # Perflint
   "PGH", # pygrep-hooks
   "RUF",
   "UP", # Pyupgrade

xarray/backends/memory.py

Lines changed: 1 addition & 5 deletions
@@ -27,11 +27,7 @@ def get_variables(self):
         return self._variables
 
     def get_dimensions(self):
-        dims = {}
-        for v in self._variables.values():
-            for d, s in v.dims.items():
-                dims[d] = s
-        return dims
+        return {d: s for v in self._variables.values() for d, s in v.dims.items()}
 
     def prepare_variable(self, k, v, *args, **kwargs):
         new_var = Variable(v.dims, np.empty_like(v), v.attrs)
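The get_dimensions rewrite relies on a nested dict comprehension evaluating in the same order as the original nested loops, so a dimension reported by several variables still ends up with the size of the last variable seen. A tiny sketch of that equivalence (using a hypothetical stand-in object, not xarray's Variable):

from types import SimpleNamespace

# Hypothetical variables whose .dims behaves like a mapping of name -> size.
variables = {
    "a": SimpleNamespace(dims={"x": 3, "y": 4}),
    "b": SimpleNamespace(dims={"y": 5}),  # duplicate "y": last one seen wins
}

# Old loop form
dims_loop = {}
for v in variables.values():
    for d, s in v.dims.items():
        dims_loop[d] = s

# New comprehension form
dims_comp = {d: s for v in variables.values() for d, s in v.dims.items()}

assert dims_loop == dims_comp == {"x": 3, "y": 5}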

xarray/backends/plugins.py

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ def backends_dict_from_pkg(
 def set_missing_parameters(
     backend_entrypoints: dict[str, type[BackendEntrypoint]],
 ) -> None:
-    for _, backend in backend_entrypoints.items():
+    for backend in backend_entrypoints.values():
         if backend.open_dataset_parameters is None:
             open_dataset = backend.open_dataset
             backend.open_dataset_parameters = detect_parameters(open_dataset)

xarray/core/coordinates.py

Lines changed: 1 addition & 1 deletion
@@ -752,7 +752,7 @@ def _update_coords(
         # check for inconsistent state *before* modifying anything in-place
         dims = calculate_dimensions(variables)
         new_coord_names = set(coords)
-        for dim, _size in dims.items():
+        for dim in dims.keys():
             if dim in variables:
                 new_coord_names.add(dim)

xarray/core/dataset.py

Lines changed: 7 additions & 6 deletions
@@ -5606,7 +5606,7 @@ def _unstack_once(
         new_indexes, clean_index = index.unstack()
         indexes.update(new_indexes)
 
-        for _name, idx in new_indexes.items():
+        for idx in new_indexes.values():
             variables.update(idx.create_variables(index_vars))
 
         for name, var in self.variables.items():
@@ -5647,7 +5647,7 @@ def _unstack_full_reindex(
         indexes.update(new_indexes)
 
         new_index_variables = {}
-        for _name, idx in new_indexes.items():
+        for idx in new_indexes.values():
             new_index_variables.update(idx.create_variables(index_vars))
 
         new_dim_sizes = {k: v.size for k, v in new_index_variables.items()}
@@ -9364,10 +9364,11 @@ def pad(
         # keep indexes that won't be affected by pad and drop all other indexes
         xindexes = self.xindexes
         pad_dims = set(pad_width)
-        indexes = {}
-        for k, idx in xindexes.items():
-            if not pad_dims.intersection(xindexes.get_all_dims(k)):
-                indexes[k] = idx
+        indexes = {
+            k: idx
+            for k, idx in xindexes.items()
+            if not pad_dims.intersection(xindexes.get_all_dims(k))
+        }
 
         for name, var in self.variables.items():
             var_pad_width = {k: v for k, v in pad_width.items() if k in var.dims}
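The Dataset.pad change is the same PERF403 pattern with a filter: only entries whose dimensions are unaffected by the pad are kept. A small sketch with plain dicts (hypothetical names, not xarray index objects):

pad_dims = {"x"}
index_dims = {"x_idx": {"x"}, "y_idx": {"y"}, "xy_idx": {"x", "y"}}

# Old loop form
kept_loop = {}
for name, dims in index_dims.items():
    if not pad_dims.intersection(dims):
        kept_loop[name] = dims

# New comprehension form: identical keys, values, and insertion order
kept = {
    name: dims
    for name, dims in index_dims.items()
    if not pad_dims.intersection(dims)
}

assert kept == kept_loop == {"y_idx": {"y"}}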

xarray/core/merge.py

Lines changed: 1 addition & 1 deletion
@@ -710,7 +710,7 @@ def merge_core(
     coord_names.intersection_update(variables)
     if explicit_coords is not None:
         coord_names.update(explicit_coords)
-    for dim, _size in dims.items():
+    for dim in dims.keys():
         if dim in variables:
             coord_names.add(dim)
     ambiguous_coords = coord_names.intersection(noncoord_names)

xarray/tests/test_backends.py

Lines changed: 1 addition & 1 deletion
@@ -847,7 +847,7 @@ def find_and_validate_array(obj):
             else:
                 raise TypeError(f"{type(obj.array)} is wrapped by {type(obj)}")
 
-        for _k, v in ds.variables.items():
+        for v in ds.variables.values():
             find_and_validate_array(v._data)
 
     def test_array_type_after_indexing(self) -> None:

xarray/tests/test_combine.py

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@
 
 def assert_combined_tile_ids_equal(dict1, dict2):
     assert len(dict1) == len(dict2)
-    for k, _v in dict1.items():
+    for k in dict1.keys():
         assert k in dict2.keys()
         assert_equal(dict1[k], dict2[k])

xarray/tests/test_concat.py

Lines changed: 28 additions & 30 deletions
@@ -74,40 +74,38 @@ def create_typed_datasets(
     num_datasets: int = 2, seed: int | None = None
 ) -> list[Dataset]:
     var_strings = ["a", "b", "c", "d", "e", "f", "g", "h"]
-    result = []
     rng = np.random.default_rng(seed)
     lat = rng.standard_normal(size=(1, 4))
     lon = rng.standard_normal(size=(1, 4))
-    for i in range(num_datasets):
-        result.append(
-            Dataset(
-                data_vars={
-                    "float": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
-                    "float2": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
-                    "string": (
-                        ["x", "y", "day"],
-                        rng.choice(var_strings, size=(1, 4, 2)),
-                    ),
-                    "int": (["x", "y", "day"], rng.integers(0, 10, size=(1, 4, 2))),
-                    "datetime64": (
-                        ["x", "y", "day"],
-                        np.arange(
-                            np.datetime64("2017-01-01"), np.datetime64("2017-01-09")
-                        ).reshape(1, 4, 2),
-                    ),
-                    "timedelta64": (
-                        ["x", "y", "day"],
-                        np.reshape([pd.Timedelta(days=i) for i in range(8)], [1, 4, 2]),
-                    ),
-                },
-                coords={
-                    "lat": (["x", "y"], lat),
-                    "lon": (["x", "y"], lon),
-                    "day": ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)],
-                },
-            )
+    return [
+        Dataset(
+            data_vars={
+                "float": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
+                "float2": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
+                "string": (
+                    ["x", "y", "day"],
+                    rng.choice(var_strings, size=(1, 4, 2)),
+                ),
+                "int": (["x", "y", "day"], rng.integers(0, 10, size=(1, 4, 2))),
+                "datetime64": (
+                    ["x", "y", "day"],
+                    np.arange(
+                        np.datetime64("2017-01-01"), np.datetime64("2017-01-09")
+                    ).reshape(1, 4, 2),
+                ),
+                "timedelta64": (
+                    ["x", "y", "day"],
+                    np.reshape([pd.Timedelta(days=i) for i in range(8)], [1, 4, 2]),
+                ),
+            },
+            coords={
+                "lat": (["x", "y"], lat),
+                "lon": (["x", "y"], lon),
+                "day": ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)],
+            },
         )
-    return result
+        for i in range(num_datasets)
+    ]
 
 
 def test_concat_compat() -> None:
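Worth noting about this PERF401 rewrite: a list comprehension evaluates its element expression in the same iteration order as the append loop, so the calls that advance the shared rng produce an identical random stream and the resulting datasets are unchanged. A simplified sketch of that property (hypothetical helper functions, not from the test suite):

import numpy as np

def build_loop(seed):
    rng = np.random.default_rng(seed)
    result = []
    for i in range(3):
        result.append(rng.standard_normal(size=2).sum() + i)
    return result

def build_comp(seed):
    rng = np.random.default_rng(seed)
    return [rng.standard_normal(size=2).sum() + i for i in range(3)]

# Same seed, same call order, same values.
assert build_loop(0) == build_comp(0)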

xarray/tests/test_dataset.py

Lines changed: 2 additions & 2 deletions
@@ -3036,12 +3036,12 @@ def test_drop_encoding(self) -> None:
         vencoding = {"scale_factor": 10}
         orig.encoding = {"foo": "bar"}
 
-        for k, _v in orig.variables.items():
+        for k in orig.variables.keys():
             orig[k].encoding = vencoding
 
         actual = orig.drop_encoding()
         assert actual.encoding == {}
-        for _k, v in actual.variables.items():
+        for v in actual.variables.values():
             assert v.encoding == {}
 
         assert_equal(actual, orig)
