Skip to content

Commit 85deeac

Browse files
Apply assorted ruff/Pylint rules (PL) / Enforce PLE rules (#10366)
* Apply ruff/Pylint rule PLC0206 — Extracting value from dictionary without calling `.items()`
* Apply ruff/Pylint rule PLR1714 — Consider merging multiple comparisons
* Apply ruff/Pylint rule PLR2044 — Line with empty comment
* Apply ruff/Pylint rule PLR5501 — Use `elif` instead of `else` then `if`, to reduce indentation
* Apply ruff/Pylint rule PLW0127 — Self-assignment of variable. The intent of the "flake8 workaround" might have been to avoid F811 errors. These F811 errors exist only because of the ignored F401 errors, so the F401 errors are now ignored more precisely to clarify the situation.
* Enforce ruff/Pylint Error rules (PLE)
1 parent 3cbf960 commit 85deeac

21 files changed

+235
-262
lines changed

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -258,6 +258,7 @@ extend-select = [
258258
"PERF", # Perflint
259259
"W", # pycodestyle warnings
260260
"PGH", # pygrep-hooks
261+
"PLE", # Pylint Errors
261262
"UP", # pyupgrade
262263
"FURB", # refurb
263264
"RUF",

xarray/backends/common.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -338,11 +338,10 @@ def add(self, source, target, region=None):
338338
self.sources.append(source)
339339
self.targets.append(target)
340340
self.regions.append(region)
341+
elif region:
342+
target[region] = source
341343
else:
342-
if region:
343-
target[region] = source
344-
else:
345-
target[...] = source
344+
target[...] = source
346345

347346
def sync(self, compute=True, chunkmanager_store_kwargs=None):
348347
if self.sources:

xarray/coding/cftime_offsets.py

Lines changed: 16 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -279,9 +279,8 @@ def _adjust_n_years(other, n, month, reference_day):
279279
if n > 0:
280280
if other.month < month or (other.month == month and other.day < reference_day):
281281
n -= 1
282-
else:
283-
if other.month > month or (other.month == month and other.day > reference_day):
284-
n += 1
282+
elif other.month > month or (other.month == month and other.day > reference_day):
283+
n += 1
285284
return n
286285

287286

@@ -353,12 +352,11 @@ def roll_qtrday(
353352
# pretend to roll back if on same month but
354353
# before compare_day
355354
n -= 1
356-
else:
357-
if months_since > 0 or (
358-
months_since == 0 and other.day > _get_day_of_month(other, day_option)
359-
):
360-
# make sure to roll forward, so negate
361-
n += 1
355+
elif months_since > 0 or (
356+
months_since == 0 and other.day > _get_day_of_month(other, day_option)
357+
):
358+
# make sure to roll forward, so negate
359+
n += 1
362360
return n
363361

364362

@@ -815,13 +813,12 @@ def delta_to_tick(delta: timedelta | pd.Timedelta) -> Tick:
815813
return Minute(n=seconds // 60)
816814
else:
817815
return Second(n=seconds)
816+
# Regardless of the days and seconds this will always be a Millisecond
817+
# or Microsecond object
818+
elif delta.microseconds % 1_000 == 0:
819+
return Millisecond(n=delta.microseconds // 1_000)
818820
else:
819-
# Regardless of the days and seconds this will always be a Millisecond
820-
# or Microsecond object
821-
if delta.microseconds % 1_000 == 0:
822-
return Millisecond(n=delta.microseconds // 1_000)
823-
else:
824-
return Microsecond(n=delta.microseconds)
821+
return Microsecond(n=delta.microseconds)
825822

826823

827824
def to_cftime_datetime(date_str_or_date, calendar=None):
@@ -1615,11 +1612,10 @@ def date_range_like(source, calendar, use_cftime=None):
16151612
source_calendar = "standard"
16161613
source_start = default_precision_timestamp(source_start)
16171614
source_end = default_precision_timestamp(source_end)
1618-
else:
1619-
if isinstance(source, CFTimeIndex):
1620-
source_calendar = source.calendar
1621-
else: # DataArray
1622-
source_calendar = source.dt.calendar
1615+
elif isinstance(source, CFTimeIndex):
1616+
source_calendar = source.calendar
1617+
else: # DataArray
1618+
source_calendar = source.dt.calendar
16231619

16241620
if calendar == source_calendar and is_np_datetime_like(source.dtype) ^ use_cftime:
16251621
return source

xarray/coding/times.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -579,9 +579,8 @@ def decode_cf_datetime(
579579
"'time_unit' or specify 'use_cftime=True'.",
580580
SerializationWarning,
581581
)
582-
else:
583-
if _is_standard_calendar(calendar):
584-
dates = cftime_to_nptime(dates, time_unit=time_unit)
582+
elif _is_standard_calendar(calendar):
583+
dates = cftime_to_nptime(dates, time_unit=time_unit)
585584
elif use_cftime:
586585
dates = _decode_datetime_with_cftime(flat_num_dates, units, calendar)
587586
else:

xarray/computation/apply_ufunc.py

Lines changed: 13 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -445,17 +445,16 @@ def apply_dict_of_variables_vfunc(
445445
core_dim_present = _check_core_dims(signature, variable_args, name)
446446
if core_dim_present is True:
447447
result_vars[name] = func(*variable_args)
448+
elif on_missing_core_dim == "raise":
449+
raise ValueError(core_dim_present)
450+
elif on_missing_core_dim == "copy":
451+
result_vars[name] = variable_args[0]
452+
elif on_missing_core_dim == "drop":
453+
pass
448454
else:
449-
if on_missing_core_dim == "raise":
450-
raise ValueError(core_dim_present)
451-
elif on_missing_core_dim == "copy":
452-
result_vars[name] = variable_args[0]
453-
elif on_missing_core_dim == "drop":
454-
pass
455-
else:
456-
raise ValueError(
457-
f"Invalid value for `on_missing_core_dim`: {on_missing_core_dim!r}"
458-
)
455+
raise ValueError(
456+
f"Invalid value for `on_missing_core_dim`: {on_missing_core_dim!r}"
457+
)
459458

460459
if signature.num_outputs > 1:
461460
return _unpack_dict_tuples(result_vars, signature.num_outputs)
@@ -809,11 +808,10 @@ def func(*arrays):
809808
raise ValueError(
810809
f"unknown setting for chunked array handling in apply_ufunc: {dask}"
811810
)
812-
else:
813-
if vectorize:
814-
func = _vectorize(
815-
func, signature, output_dtypes=output_dtypes, exclude_dims=exclude_dims
816-
)
811+
elif vectorize:
812+
func = _vectorize(
813+
func, signature, output_dtypes=output_dtypes, exclude_dims=exclude_dims
814+
)
817815

818816
result_data = func(*input_data)
819817

xarray/computation/rolling.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1253,18 +1253,17 @@ def wrapped_func(
12531253
for c, v in self.obj.coords.items():
12541254
if c == self.obj.name:
12551255
coords[c] = reduced
1256+
elif any(d in self.windows for d in v.dims):
1257+
coords[c] = v.variable.coarsen(
1258+
self.windows,
1259+
self.coord_func[c],
1260+
self.boundary,
1261+
self.side,
1262+
keep_attrs,
1263+
**kwargs,
1264+
)
12561265
else:
1257-
if any(d in self.windows for d in v.dims):
1258-
coords[c] = v.variable.coarsen(
1259-
self.windows,
1260-
self.coord_func[c],
1261-
self.boundary,
1262-
self.side,
1263-
keep_attrs,
1264-
**kwargs,
1265-
)
1266-
else:
1267-
coords[c] = v
1266+
coords[c] = v
12681267
return DataArray(
12691268
reduced, dims=self.obj.dims, coords=coords, name=self.obj.name
12701269
)

xarray/conventions.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -226,17 +226,16 @@ def decode_cf_variable(
226226
DeprecationWarning,
227227
)
228228
decode_times = CFDatetimeCoder(use_cftime=use_cftime)
229-
else:
230-
if use_cftime is not None:
231-
raise TypeError(
232-
"Usage of 'use_cftime' as a kwarg is not allowed "
233-
"if a 'CFDatetimeCoder' instance is passed to "
234-
"'decode_times'. Please set 'use_cftime' "
235-
"when initializing 'CFDatetimeCoder' instead.\n"
236-
"Example usage:\n"
237-
" time_coder = xr.coders.CFDatetimeCoder(use_cftime=True)\n"
238-
" ds = xr.open_dataset(decode_times=time_coder)\n",
239-
)
229+
elif use_cftime is not None:
230+
raise TypeError(
231+
"Usage of 'use_cftime' as a kwarg is not allowed "
232+
"if a 'CFDatetimeCoder' instance is passed to "
233+
"'decode_times'. Please set 'use_cftime' "
234+
"when initializing 'CFDatetimeCoder' instead.\n"
235+
"Example usage:\n"
236+
" time_coder = xr.coders.CFDatetimeCoder(use_cftime=True)\n"
237+
" ds = xr.open_dataset(decode_times=time_coder)\n",
238+
)
240239
var = decode_times.decode(var, name=name)
241240

242241
if decode_endianness and not var.dtype.isnative:

xarray/core/dataset.py

Lines changed: 66 additions & 73 deletions
Original file line numberDiff line numberDiff line change
@@ -2904,9 +2904,8 @@ def sel(
29042904
for k, v in query_results.variables.items():
29052905
if v.dims:
29062906
no_scalar_variables[k] = v
2907-
else:
2908-
if k in self._coord_names:
2909-
query_results.drop_coords.append(k)
2907+
elif k in self._coord_names:
2908+
query_results.drop_coords.append(k)
29102909
query_results.variables = no_scalar_variables
29112910

29122911
result = self.isel(indexers=query_results.dim_indexers, drop=drop)
@@ -4552,26 +4551,25 @@ def expand_dims(
45524551
for d, c in zip_axis_dim:
45534552
all_dims.insert(d, c)
45544553
variables[k] = v.set_dims(dict(all_dims))
4555-
else:
4556-
if k not in variables:
4557-
if k in coord_names and create_index_for_new_dim:
4558-
# If dims includes a label of a non-dimension coordinate,
4559-
# it will be promoted to a 1D coordinate with a single value.
4560-
index, index_vars = create_default_index_implicit(v.set_dims(k))
4561-
indexes[k] = index
4562-
variables.update(index_vars)
4563-
else:
4564-
if create_index_for_new_dim:
4565-
warnings.warn(
4566-
f"No index created for dimension {k} because variable {k} is not a coordinate. "
4567-
f"To create an index for {k}, please first call `.set_coords('{k}')` on this object.",
4568-
UserWarning,
4569-
stacklevel=2,
4570-
)
4554+
elif k not in variables:
4555+
if k in coord_names and create_index_for_new_dim:
4556+
# If dims includes a label of a non-dimension coordinate,
4557+
# it will be promoted to a 1D coordinate with a single value.
4558+
index, index_vars = create_default_index_implicit(v.set_dims(k))
4559+
indexes[k] = index
4560+
variables.update(index_vars)
4561+
else:
4562+
if create_index_for_new_dim:
4563+
warnings.warn(
4564+
f"No index created for dimension {k} because variable {k} is not a coordinate. "
4565+
f"To create an index for {k}, please first call `.set_coords('{k}')` on this object.",
4566+
UserWarning,
4567+
stacklevel=2,
4568+
)
45714569

4572-
# create 1D variable without creating a new index
4573-
new_1d_var = v.set_dims(k)
4574-
variables.update({k: new_1d_var})
4570+
# create 1D variable without creating a new index
4571+
new_1d_var = v.set_dims(k)
4572+
variables.update({k: new_1d_var})
45754573

45764574
return self._replace_with_new_dims(
45774575
variables, coord_names=coord_names, indexes=indexes
@@ -4890,9 +4888,8 @@ def set_xindex(
48904888
index_cls = PandasIndex
48914889
else:
48924890
index_cls = PandasMultiIndex
4893-
else:
4894-
if not issubclass(index_cls, Index):
4895-
raise TypeError(f"{index_cls} is not a subclass of xarray.Index")
4891+
elif not issubclass(index_cls, Index):
4892+
raise TypeError(f"{index_cls} is not a subclass of xarray.Index")
48964893

48974894
invalid_coords = set(coord_names) - self._coord_names
48984895

@@ -6744,34 +6741,33 @@ def reduce(
67446741
if name in self.coords:
67456742
if not reduce_dims:
67466743
variables[name] = var
6747-
else:
6748-
if (
6749-
# Some reduction functions (e.g. std, var) need to run on variables
6750-
# that don't have the reduce dims: PR5393
6751-
not is_extension_array_dtype(var.dtype)
6752-
and (
6753-
not reduce_dims
6754-
or not numeric_only
6755-
or np.issubdtype(var.dtype, np.number)
6756-
or (var.dtype == np.bool_)
6757-
)
6758-
):
6759-
# prefer to aggregate over axis=None rather than
6760-
# axis=(0, 1) if they will be equivalent, because
6761-
# the former is often more efficient
6762-
# keep single-element dims as list, to support Hashables
6763-
reduce_maybe_single = (
6764-
None
6765-
if len(reduce_dims) == var.ndim and var.ndim != 1
6766-
else reduce_dims
6767-
)
6768-
variables[name] = var.reduce(
6769-
func,
6770-
dim=reduce_maybe_single,
6771-
keep_attrs=keep_attrs,
6772-
keepdims=keepdims,
6773-
**kwargs,
6774-
)
6744+
elif (
6745+
# Some reduction functions (e.g. std, var) need to run on variables
6746+
# that don't have the reduce dims: PR5393
6747+
not is_extension_array_dtype(var.dtype)
6748+
and (
6749+
not reduce_dims
6750+
or not numeric_only
6751+
or np.issubdtype(var.dtype, np.number)
6752+
or (var.dtype == np.bool_)
6753+
)
6754+
):
6755+
# prefer to aggregate over axis=None rather than
6756+
# axis=(0, 1) if they will be equivalent, because
6757+
# the former is often more efficient
6758+
# keep single-element dims as list, to support Hashables
6759+
reduce_maybe_single = (
6760+
None
6761+
if len(reduce_dims) == var.ndim and var.ndim != 1
6762+
else reduce_dims
6763+
)
6764+
variables[name] = var.reduce(
6765+
func,
6766+
dim=reduce_maybe_single,
6767+
keep_attrs=keep_attrs,
6768+
keepdims=keepdims,
6769+
**kwargs,
6770+
)
67756771

67766772
coord_names = {k for k in self.coords if k in variables}
67776773
indexes = {k: v for k, v in self._indexes.items() if k in variables}
@@ -7970,8 +7966,6 @@ def sortby(
79707966
variables = variables(self)
79717967
if not isinstance(variables, list):
79727968
variables = [variables]
7973-
else:
7974-
variables = variables
79757969
arrays = [v if isinstance(v, DataArray) else self[v] for v in variables]
79767970
aligned_vars = align(self, *arrays, join="left")
79777971
aligned_self = cast("Self", aligned_vars[0])
@@ -8395,25 +8389,24 @@ def _integrate_one(self, coord, datetime_unit=None, cumulative=False):
83958389
if dim not in v.dims or cumulative:
83968390
variables[k] = v
83978391
coord_names.add(k)
8398-
else:
8399-
if k in self.data_vars and dim in v.dims:
8400-
coord_data = to_like_array(coord_var.data, like=v.data)
8401-
if _contains_datetime_like_objects(v):
8402-
v = datetime_to_numeric(v, datetime_unit=datetime_unit)
8403-
if cumulative:
8404-
integ = duck_array_ops.cumulative_trapezoid(
8405-
v.data, coord_data, axis=v.get_axis_num(dim)
8406-
)
8407-
v_dims = v.dims
8408-
else:
8409-
integ = duck_array_ops.trapz(
8410-
v.data, coord_data, axis=v.get_axis_num(dim)
8411-
)
8412-
v_dims = list(v.dims)
8413-
v_dims.remove(dim)
8414-
variables[k] = Variable(v_dims, integ)
8392+
elif k in self.data_vars and dim in v.dims:
8393+
coord_data = to_like_array(coord_var.data, like=v.data)
8394+
if _contains_datetime_like_objects(v):
8395+
v = datetime_to_numeric(v, datetime_unit=datetime_unit)
8396+
if cumulative:
8397+
integ = duck_array_ops.cumulative_trapezoid(
8398+
v.data, coord_data, axis=v.get_axis_num(dim)
8399+
)
8400+
v_dims = v.dims
84158401
else:
8416-
variables[k] = v
8402+
integ = duck_array_ops.trapz(
8403+
v.data, coord_data, axis=v.get_axis_num(dim)
8404+
)
8405+
v_dims = list(v.dims)
8406+
v_dims.remove(dim)
8407+
variables[k] = Variable(v_dims, integ)
8408+
else:
8409+
variables[k] = v
84178410
indexes = {k: v for k, v in self._indexes.items() if k in variables}
84188411
return self._replace_with_new_dims(
84198412
variables, coord_names=coord_names, indexes=indexes

0 commit comments

Comments (0)