Skip to content

Commit db12b0d

Browse files
jhamman and andersy005 authored
(chore) min versions bump (#8022)
* chore: bump minimum versions in min-all-deps.yml * docs * bump bare minimum * chore: cleanup deprecations following minimum version bump * more cleanup --------- Co-authored-by: Anderson Banihirwe <axbanihirwe@ualr.edu>
1 parent bb501ba commit db12b0d

20 files changed

+107
-188
lines changed

ci/min_deps_check.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#!/usr/bin/env python
22
"""Fetch from conda database all available versions of the xarray dependencies and their
3-
publication date. Compare it against requirements/py37-min-all-deps.yml to verify the
3+
publication date. Compare it against requirements/min-all-deps.yml to verify the
44
policy on obsolete dependencies is being followed. Print a pretty report :)
55
"""
66
import itertools
@@ -46,7 +46,7 @@ def warning(msg: str) -> None:
4646

4747

4848
def parse_requirements(fname) -> Iterator[tuple[str, int, int, int | None]]:
49-
"""Load requirements/py37-min-all-deps.yml
49+
"""Load requirements/min-all-deps.yml
5050
5151
Yield (package name, major version, minor version, [patch version])
5252
"""

ci/requirements/bare-minimum.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,6 @@ dependencies:
1111
- pytest-env
1212
- pytest-xdist
1313
- pytest-timeout
14-
- numpy=1.21
14+
- numpy=1.22
1515
- packaging=21.3
1616
- pandas=1.4

ci/requirements/min-all-deps.yml

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -8,48 +8,48 @@ dependencies:
88
# When upgrading python, numpy, or pandas, must also change
99
# doc/user-guide/installing.rst, doc/user-guide/plotting.rst and setup.py.
1010
- python=3.9
11-
- boto3=1.20
11+
- boto3=1.24
1212
- bottleneck=1.3
1313
- cartopy=0.20
1414
- cdms2=3.1
15-
- cftime=1.5
15+
- cftime=1.6
1616
- coveralls
17-
- dask-core=2022.1
18-
- distributed=2022.1
17+
- dask-core=2022.7
18+
- distributed=2022.7
1919
- flox=0.5
20-
- h5netcdf=0.13
20+
- h5netcdf=1.0
2121
# h5py and hdf5 tend to cause conflicts
2222
# for e.g. hdf5 1.12 conflicts with h5py=3.1
2323
# prioritize bumping other packages instead
2424
- h5py=3.6
2525
- hdf5=1.12
2626
- hypothesis
27-
- iris=3.1
28-
- lxml=4.7 # Optional dep of pydap
27+
- iris=3.2
28+
- lxml=4.9 # Optional dep of pydap
2929
- matplotlib-base=3.5
3030
- nc-time-axis=1.4
3131
# netcdf follows a 1.major.minor[.patch] convention
3232
# (see https://github.com/Unidata/netcdf4-python/issues/1090)
33-
- netcdf4=1.5.7
33+
- netcdf4=1.6.0
3434
- numba=0.55
35-
- numpy=1.21
35+
- numpy=1.22
3636
- packaging=21.3
3737
- pandas=1.4
38-
- pint=0.18
38+
- pint=0.19
3939
- pip
4040
- pseudonetcdf=3.2
41-
- pydap=3.2
41+
- pydap=3.3
4242
- pytest
4343
- pytest-cov
4444
- pytest-env
4545
- pytest-xdist
4646
- pytest-timeout
47-
- rasterio=1.2
48-
- scipy=1.7
47+
- rasterio=1.3
48+
- scipy=1.8
4949
- seaborn=0.11
5050
- sparse=0.13
51-
- toolz=0.11
52-
- typing_extensions=4.0
53-
- zarr=2.10
51+
- toolz=0.12
52+
- typing_extensions=4.3
53+
- zarr=2.12
5454
- pip:
55-
- numbagg==0.1
55+
- numbagg==0.2.1

doc/getting-started-guide/installing.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ Required dependencies
77
---------------------
88

99
- Python (3.9 or later)
10-
- `numpy <https://www.numpy.org/>`__ (1.21 or later)
10+
- `numpy <https://www.numpy.org/>`__ (1.22 or later)
1111
- `packaging <https://packaging.pypa.io/en/latest/#>`__ (21.3 or later)
1212
- `pandas <https://pandas.pydata.org/>`__ (1.4 or later)
1313

doc/whats-new.rst

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,29 @@ New Features
4747
Breaking changes
4848
~~~~~~~~~~~~~~~~
4949

50+
- The minimum versions of some dependencies were changed (:pull:`8022`):
51+
52+
===================== ========= ========
53+
Package Old New
54+
===================== ========= ========
55+
boto3 1.20 1.24
56+
cftime 1.5 1.6
57+
dask-core 2022.1 2022.7
58+
distributed 2022.1 2022.7
59+
h5netcdf 0.13 1.0
60+
iris 3.1 3.2
61+
lxml 4.7 4.9
62+
netcdf4 1.5.7 1.6.0
63+
numpy 1.21 1.22
64+
pint 0.18 0.19
65+
pydap 3.2 3.3
66+
rasterio 1.2 1.3
67+
scipy 1.7 1.8
68+
toolz 0.11 0.12
69+
typing_extensions 4.0 4.3
70+
zarr 2.10 2.12
71+
numbagg 0.1 0.2.1
72+
===================== ========= ========
5073

5174
Deprecations
5275
~~~~~~~~~~~~

xarray/backends/h5netcdf_.py

Lines changed: 5 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,6 @@
66
from collections.abc import Iterable
77
from typing import TYPE_CHECKING, Any
88

9-
from packaging.version import Version
10-
119
from xarray.backends.common import (
1210
BACKEND_ENTRYPOINTS,
1311
BackendEntrypoint,
@@ -233,28 +231,14 @@ def get_attrs(self):
233231
return FrozenDict(_read_attributes(self.ds))
234232

235233
def get_dimensions(self):
236-
import h5netcdf
237-
238-
if Version(h5netcdf.__version__) >= Version("0.14.0.dev0"):
239-
return FrozenDict((k, len(v)) for k, v in self.ds.dimensions.items())
240-
else:
241-
return self.ds.dimensions
234+
return FrozenDict((k, len(v)) for k, v in self.ds.dimensions.items())
242235

243236
def get_encoding(self):
244-
import h5netcdf
245-
246-
if Version(h5netcdf.__version__) >= Version("0.14.0.dev0"):
247-
return {
248-
"unlimited_dims": {
249-
k for k, v in self.ds.dimensions.items() if v.isunlimited()
250-
}
251-
}
252-
else:
253-
return {
254-
"unlimited_dims": {
255-
k for k, v in self.ds.dimensions.items() if v is None
256-
}
237+
return {
238+
"unlimited_dims": {
239+
k for k, v in self.ds.dimensions.items() if v.isunlimited()
257240
}
241+
}
258242

259243
def set_dimension(self, name, length, is_unlimited=False):
260244
_ensure_no_forward_slash_in_name(name)

xarray/backends/pydap_.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
from typing import TYPE_CHECKING, Any
55

66
import numpy as np
7-
from packaging.version import Version
87

98
from xarray.backends.common import (
109
BACKEND_ENTRYPOINTS,
@@ -123,11 +122,10 @@ def open(
123122
"output_grid": output_grid or True,
124123
"timeout": timeout,
125124
}
126-
if Version(pydap.lib.__version__) >= Version("3.3.0"):
127-
if verify is not None:
128-
kwargs.update({"verify": verify})
129-
if user_charset is not None:
130-
kwargs.update({"user_charset": user_charset})
125+
if verify is not None:
126+
kwargs.update({"verify": verify})
127+
if user_charset is not None:
128+
kwargs.update({"user_charset": user_charset})
131129
ds = pydap.client.open_url(**kwargs)
132130
return cls(ds)
133131

xarray/coding/cftimeindex.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -470,13 +470,9 @@ def get_loc(self, key):
470470
else:
471471
return super().get_loc(key)
472472

473-
def _maybe_cast_slice_bound(self, label, side, kind=None):
473+
def _maybe_cast_slice_bound(self, label, side):
474474
"""Adapted from
475475
pandas.tseries.index.DatetimeIndex._maybe_cast_slice_bound
476-
477-
Note that we have never used the kind argument in CFTimeIndex and it is
478-
deprecated as of pandas version 1.3.0. It exists only for compatibility
479-
reasons. We can remove it when our minimum version of pandas is 1.3.0.
480476
"""
481477
if not isinstance(label, str):
482478
return label

xarray/core/combine.py

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
from __future__ import annotations
22

33
import itertools
4-
import warnings
54
from collections import Counter
65
from collections.abc import Iterable, Sequence
76
from typing import TYPE_CHECKING, Literal, Union
@@ -653,7 +652,6 @@ def _combine_single_variable_hypercube(
653652
return concatenated
654653

655654

656-
# TODO remove empty list default param after version 0.21, see PR4696
657655
def combine_by_coords(
658656
data_objects: Iterable[Dataset | DataArray] = [],
659657
compat: CompatOptions = "no_conflicts",
@@ -662,7 +660,6 @@ def combine_by_coords(
662660
fill_value: object = dtypes.NA,
663661
join: JoinOptions = "outer",
664662
combine_attrs: CombineAttrsOptions = "no_conflicts",
665-
datasets: Iterable[Dataset] | None = None,
666663
) -> Dataset | DataArray:
667664
"""
668665
@@ -760,8 +757,6 @@ def combine_by_coords(
760757
If a callable, it must expect a sequence of ``attrs`` dicts and a context object
761758
as its only parameters.
762759
763-
datasets : Iterable of Datasets
764-
765760
Returns
766761
-------
767762
combined : xarray.Dataset or xarray.DataArray
@@ -918,14 +913,6 @@ def combine_by_coords(
918913
DataArrays or Datasets, a ValueError will be raised (as this is an ambiguous operation).
919914
"""
920915

921-
# TODO remove after version 0.21, see PR4696
922-
if datasets is not None:
923-
warnings.warn(
924-
"The datasets argument has been renamed to `data_objects`."
925-
" From 0.21 on passing a value for datasets will raise an error."
926-
)
927-
data_objects = datasets
928-
929916
if not data_objects:
930917
return Dataset()
931918

xarray/core/dataarray.py

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -5036,15 +5036,15 @@ def quantile(
50365036
desired quantile lies between two data points. The options sorted by their R
50375037
type as summarized in the H&F paper [1]_ are:
50385038
5039-
1. "inverted_cdf" (*)
5040-
2. "averaged_inverted_cdf" (*)
5041-
3. "closest_observation" (*)
5042-
4. "interpolated_inverted_cdf" (*)
5043-
5. "hazen" (*)
5044-
6. "weibull" (*)
5039+
1. "inverted_cdf"
5040+
2. "averaged_inverted_cdf"
5041+
3. "closest_observation"
5042+
4. "interpolated_inverted_cdf"
5043+
5. "hazen"
5044+
6. "weibull"
50455045
7. "linear" (default)
5046-
8. "median_unbiased" (*)
5047-
9. "normal_unbiased" (*)
5046+
8. "median_unbiased"
5047+
9. "normal_unbiased"
50485048
50495049
The first three methods are discontinuous. The following discontinuous
50505050
variations of the default "linear" (7.) option are also available:
@@ -5058,8 +5058,6 @@ def quantile(
50585058
was previously called "interpolation", renamed in accordance with numpy
50595059
version 1.22.0.
50605060
5061-
(*) These methods require numpy version 1.22 or newer.
5062-
50635061
keep_attrs : bool or None, optional
50645062
If True, the dataset's attributes (`attrs`) will be copied from
50655063
the original object to the new one. If False (default), the new

0 commit comments

Comments (0)