Commit 4ea3937

Enforce ruff/flake8-comprehensions rules (C4) (#9724)

* Apply ruff/flake8-comprehensions rule C409
  C409 Unnecessary `tuple` literal passed to `tuple()` (remove the outer call to `tuple()`)
  C409 Unnecessary list comprehension passed to `tuple()` (rewrite as a generator)
* Apply ruff/flake8-comprehensions rule C414
  C414 Unnecessary `tuple` call within `sorted()`
* Apply ruff/flake8-comprehensions rule C416
  C416 Unnecessary `dict` comprehension (rewrite using `dict()`)
  C416 Unnecessary `list` comprehension (rewrite using `list()`)
* Apply ruff/flake8-comprehensions rule C417
  C417 Unnecessary `map` usage (rewrite using a generator expression)
* Apply ruff/flake8-comprehensions rule C419
  C419 Unnecessary list comprehension
* Enforce ruff/flake8-comprehensions rules (C4)

1 parent 5515aec commit 4ea3937

20 files changed: +46, -62 lines changed
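
For readers unfamiliar with the flake8-comprehensions checks named in the commit message above, the short Python sketch below illustrates the kind of rewrite each rule asks for. It is illustrative only; the names and data are made up and are not taken from the xarray diff that follows.

# Illustrative sketch of the C4 rewrites; `data` is a made-up example dict.
data = {"b": 2, "a": 1}

# C409: unnecessary tuple literal / list comprehension passed to tuple()
dims = tuple(k for k in data)        # instead of tuple([k for k in data])

# C414: unnecessary tuple() call within sorted()
ordered = sorted(data)               # instead of sorted(tuple(data))

# C416: unnecessary dict/list comprehension that only copies its input
copied = dict(data.items())          # instead of {k: v for k, v in data.items()}

# C417: unnecessary map() usage -- rewrite as a comprehension or generator expression
doubled = [v * 2 for v in data.values()]  # instead of list(map(lambda v: v * 2, data.values()))

# C419: unnecessary list comprehension inside any()/all()
has_a = any(k == "a" for k in data)  # instead of any([k == "a" for k in data])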

properties/test_index_manipulation.py

Lines changed: 1 addition & 1 deletion
@@ -179,7 +179,7 @@ def rename_vars(self, newname, data):
     def drop_dims(self, data):
         dims = data.draw(
             st.lists(
-                st.sampled_from(sorted(tuple(self.dataset.dims))),
+                st.sampled_from(sorted(self.dataset.dims)),
                 min_size=1,
                 unique=True,
             )

pyproject.toml

Lines changed: 2 additions & 0 deletions
@@ -241,6 +241,7 @@ extend-safe-fixes = [
   "TID252", # absolute imports
 ]
 ignore = [
+  "C40",
   "E402",
   "E501",
   "E731",
@@ -254,6 +255,7 @@ ignore = [
 ]
 extend-select = [
   "B", # flake8-bugbear
+  "C4", # flake8-comprehensions
   "F", # Pyflakes
   "E", # Pycodestyle
   "W",

xarray/backends/zarr.py

Lines changed: 2 additions & 2 deletions
@@ -194,7 +194,7 @@ def __init__(self, zarr_array):
         if (
             not _zarr_v3()
             and self._array.filters is not None
-            and any([filt.codec_id == "vlen-utf8" for filt in self._array.filters])
+            and any(filt.codec_id == "vlen-utf8" for filt in self._array.filters)
         ):
             dtype = coding.strings.create_vlen_dtype(str)
         else:
@@ -649,7 +649,7 @@ def open_store(
             use_zarr_fill_value_as_mask=use_zarr_fill_value_as_mask,
             zarr_format=zarr_format,
         )
-        group_paths = [node for node in _iter_zarr_groups(zarr_group, parent=group)]
+        group_paths = list(_iter_zarr_groups(zarr_group, parent=group))
         return {
             group: cls(
                 zarr_group.get(group),
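
A side note on the `any(...)` change above: dropping the inner list lets `any()` short-circuit without building the whole list first. A minimal, self-contained sketch (the filter names below are invented, not zarr's actual codec objects):

# Minimal sketch: any() over a generator stops at the first True result,
# while the list-comprehension form evaluates every element up front.
def is_vlen(codec_id: str) -> bool:
    print(f"checking {codec_id}")
    return codec_id == "vlen-utf8"

filters = ["vlen-utf8", "zlib", "shuffle"]

any(is_vlen(f) for f in filters)    # prints only "checking vlen-utf8"
any([is_vlen(f) for f in filters])  # prints all three before any() runs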

xarray/core/computation.py

Lines changed: 1 addition & 1 deletion
@@ -2252,7 +2252,7 @@ def unify_chunks(*objects: Dataset | DataArray) -> tuple[Dataset | DataArray, ..
     if not unify_chunks_args:
         return objects
 
-    chunkmanager = get_chunked_array_type(*[arg for arg in unify_chunks_args])
+    chunkmanager = get_chunked_array_type(*list(unify_chunks_args))
     _, chunked_data = chunkmanager.unify_chunks(*unify_chunks_args)
     chunked_data_iter = iter(chunked_data)
     out: list[Dataset | DataArray] = []

xarray/core/dataset.py

Lines changed: 4 additions & 4 deletions
@@ -370,7 +370,7 @@ def _get_func_args(func, param_names):
     else:
         params = list(func_args)[1:]
     if any(
-        [(p.kind in [p.VAR_POSITIONAL, p.VAR_KEYWORD]) for p in func_args.values()]
+        (p.kind in [p.VAR_POSITIONAL, p.VAR_KEYWORD]) for p in func_args.values()
     ):
         raise ValueError(
             "`param_names` must be provided because `func` takes variable length arguments."
@@ -1586,7 +1586,7 @@ def __getitem__(
                 message = f"No variable named {key!r}. Variables on the dataset include {shorten_list_repr(list(self.variables.keys()), max_items=10)}"
                 # If someone attempts `ds['foo' , 'bar']` instead of `ds[['foo', 'bar']]`
                 if isinstance(key, tuple):
-                    message += f"\nHint: use a list to select multiple variables, for example `ds[{[d for d in key]}]`"
+                    message += f"\nHint: use a list to select multiple variables, for example `ds[{list(key)}]`"
                 raise KeyError(message) from e
 
         if utils.iterable_of_hashable(key):
@@ -1686,7 +1686,7 @@ def _setitem_check(self, key, value):
                     f"Variables {missing_vars} in new values"
                    f" not available in original dataset:\n{self}"
                 )
-        elif not any([isinstance(value, t) for t in [DataArray, Number, str]]):
+        elif not any(isinstance(value, t) for t in [DataArray, Number, str]):
             raise TypeError(
                 "Dataset assignment only accepts DataArrays, Datasets, and scalars."
             )
@@ -4078,7 +4078,7 @@ def interp(
             )
             indexers.update({d: self.variables[d] for d in sdims})
 
-        obj = self if assume_sorted else self.sortby([k for k in coords])
+        obj = self if assume_sorted else self.sortby(list(coords))
 
         def maybe_variable(obj, k):
             # workaround to get variable for dimension without coordinate.

xarray/core/datatree_mapping.py

Lines changed: 1 addition & 3 deletions
@@ -170,9 +170,7 @@ def _check_single_set_return_values(path_to_node: str, obj: Any) -> int | None:
 def _check_all_return_values(returned_objects) -> int | None:
     """Walk through all values returned by mapping func over subtrees, raising on any invalid or inconsistent types."""
 
-    result_data_objects = [
-        (path_to_node, r) for path_to_node, r in returned_objects.items()
-    ]
+    result_data_objects = list(returned_objects.items())
 
     first_path, result = result_data_objects[0]
     return_values = _check_single_set_return_values(first_path, result)

xarray/core/indexes.py

Lines changed: 9 additions & 12 deletions
@@ -880,7 +880,7 @@ def _check_dim_compat(variables: Mapping[Any, Variable], all_dims: str = "equal"
     either share the same (single) dimension or each have a different dimension.
 
     """
-    if any([var.ndim != 1 for var in variables.values()]):
+    if any(var.ndim != 1 for var in variables.values()):
         raise ValueError("PandasMultiIndex only accepts 1-dimensional variables")
 
     dims = {var.dims for var in variables.values()}
@@ -1208,7 +1208,7 @@ def sel(self, labels, method=None, tolerance=None) -> IndexSelResult:
         indexer: int | slice | np.ndarray | Variable | DataArray
 
         # label(s) given for multi-index level(s)
-        if all([lbl in self.index.names for lbl in labels]):
+        if all(lbl in self.index.names for lbl in labels):
             label_values = {}
             for k, v in labels.items():
                 label_array = normalize_label(v, dtype=self.level_coords_dtype[k])
@@ -1221,7 +1221,7 @@ def sel(self, labels, method=None, tolerance=None) -> IndexSelResult:
                         f"available along coordinate {k!r} (multi-index level)"
                     ) from err
 
-            has_slice = any([isinstance(v, slice) for v in label_values.values()])
+            has_slice = any(isinstance(v, slice) for v in label_values.values())
 
             if len(label_values) == self.index.nlevels and not has_slice:
                 indexer = self.index.get_loc(
@@ -1268,9 +1268,7 @@ def sel(self, labels, method=None, tolerance=None) -> IndexSelResult:
            else:
                levels = [self.index.names[i] for i in range(len(label))]
                indexer, new_index = self.index.get_loc_level(label, level=levels)
-                scalar_coord_values.update(
-                    {k: v for k, v in zip(levels, label, strict=True)}
-                )
+                scalar_coord_values.update(dict(zip(levels, label, strict=True)))
 
         else:
             label_array = normalize_label(label)
@@ -1371,10 +1369,9 @@ def rename(self, name_dict, dims_dict):
         index = self.index.rename(new_names)
 
         new_dim = dims_dict.get(self.dim, self.dim)
-        new_level_coords_dtype = {
-            k: v
-            for k, v in zip(new_names, self.level_coords_dtype.values(), strict=True)
-        }
+        new_level_coords_dtype = dict(
+            zip(new_names, self.level_coords_dtype.values(), strict=True)
+        )
         return self._replace(
             index, dim=new_dim, level_coords_dtype=new_level_coords_dtype
         )
@@ -1820,7 +1817,7 @@ def _apply_indexes_fast(indexes: Indexes[Index], args: Mapping[Any, Any], func:
     # multi-index arrays
     indexes_fast, coords = indexes._indexes, indexes._variables
 
-    new_indexes: dict[Hashable, Index] = {k: v for k, v in indexes_fast.items()}
+    new_indexes: dict[Hashable, Index] = dict(indexes_fast.items())
     new_index_variables: dict[Hashable, Variable] = {}
     for name, index in indexes_fast.items():
         coord = coords[name]
@@ -1848,7 +1845,7 @@ def _apply_indexes(
     args: Mapping[Any, Any],
     func: str,
 ) -> tuple[dict[Hashable, Index], dict[Hashable, Variable]]:
-    new_indexes: dict[Hashable, Index] = {k: v for k, v in indexes.items()}
+    new_indexes: dict[Hashable, Index] = dict(indexes.items())
     new_index_variables: dict[Hashable, Variable] = {}
 
     for index, index_vars in indexes.group_by_index():
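
Several of the hunks above replace an identity dict comprehension over `zip(...)` with `dict(zip(...))`. The two forms build the same mapping; a quick equivalence check with made-up data (not xarray's actual names or dtypes):

# Equivalence sketch with invented data; requires Python 3.10+ for zip(strict=True).
names = ("x", "y")
dtypes = ("int64", "float32")

via_comprehension = {k: v for k, v in zip(names, dtypes, strict=True)}
via_dict = dict(zip(names, dtypes, strict=True))

assert via_comprehension == via_dict == {"x": "int64", "y": "float32"}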

xarray/core/rolling.py

Lines changed: 1 addition & 1 deletion
@@ -522,7 +522,7 @@ def _counts(self, keep_attrs: bool | None) -> DataArray:
         counts = (
             self.obj.notnull(keep_attrs=keep_attrs)
             .rolling(
-                {d: w for d, w in zip(self.dim, self.window, strict=True)},
+                dict(zip(self.dim, self.window, strict=True)),
                 center={d: self.center[i] for i, d in enumerate(self.dim)},
             )
             .construct(rolling_dim, fill_value=False, keep_attrs=keep_attrs)

xarray/core/treenode.py

Lines changed: 3 additions & 3 deletions
@@ -317,7 +317,7 @@ def iter_lineage(self: Tree) -> tuple[Tree, ...]:
             DeprecationWarning,
             stacklevel=2,
         )
-        return tuple((self, *self.parents))
+        return (self, *self.parents)
 
     @property
     def lineage(self: Tree) -> tuple[Tree, ...]:
@@ -349,7 +349,7 @@ def ancestors(self: Tree) -> tuple[Tree, ...]:
             DeprecationWarning,
             stacklevel=2,
         )
-        return tuple((*reversed(self.parents), self))
+        return (*reversed(self.parents), self)
 
     @property
     def root(self: Tree) -> Tree:
@@ -380,7 +380,7 @@ def leaves(self: Tree) -> tuple[Tree, ...]:
 
         Leaf nodes are defined as nodes which have no children.
         """
-        return tuple([node for node in self.subtree if node.is_leaf])
+        return tuple(node for node in self.subtree if node.is_leaf)
 
     @property
     def siblings(self: Tree) -> dict[str, Tree]:
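
The treenode.py changes rely on the fact that a parenthesized starred expression such as `(self, *self.parents)` is already a tuple, so the outer `tuple()` call flagged by C409 adds nothing. A small sketch with invented values:

# Sketch: unpacking into a parenthesized expression already yields a tuple,
# so tuple((head, *rest)) and (head, *rest) are equal and of the same type.
head = "root"
rest = ["child_a", "child_b"]

assert (head, *rest) == tuple((head, *rest)) == ("root", "child_a", "child_b")
assert isinstance((head, *rest), tuple)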

xarray/core/utils.py

Lines changed: 3 additions & 5 deletions
@@ -1069,11 +1069,9 @@ def contains_only_chunked_or_numpy(obj) -> bool:
         obj = obj._to_temp_dataset()
 
     return all(
-        [
-            isinstance(var._data, ExplicitlyIndexed | np.ndarray)
-            or is_chunked_array(var._data)
-            for var in obj._variables.values()
-        ]
+        isinstance(var._data, ExplicitlyIndexed | np.ndarray)
+        or is_chunked_array(var._data)
+        for var in obj._variables.values()
     )
 
 