Skip to content

Commit 18dd1f9

Browse files
dcherian and andersy005
authored and committed
Fix benchmark CI (#9013)
* [skip-ci] Fix benchmark CI * [skip-ci] reduce warnings * Fix indexing benchmark
1 parent 2046049 commit 18dd1f9

File tree

4 files changed

+21
-15
lines changed

4 files changed

+21
-15
lines changed

.github/workflows/benchmarks.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,11 @@ jobs:
2828
environment-name: xarray-tests
2929
cache-environment: true
3030
cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}-benchmark"
31+
# add "build" because of https://github.com/airspeed-velocity/asv/issues/1385
3132
create-args: >-
3233
asv
34+
build
35+
mamba
3336
3437
3538
- name: Run benchmarks
@@ -47,9 +50,6 @@ jobs:
4750
asv machine --yes
4851
echo "Baseline: ${{ github.event.pull_request.base.sha }} (${{ github.event.pull_request.base.label }})"
4952
echo "Contender: ${GITHUB_SHA} (${{ github.event.pull_request.head.label }})"
50-
# Use mamba for env creation
51-
# export CONDA_EXE=$(which mamba)
52-
export CONDA_EXE=$(which conda)
5353
# Run benchmarks for current commit against base
5454
ASV_OPTIONS="--split --show-stderr --factor $ASV_FACTOR"
5555
asv continuous $ASV_OPTIONS ${{ github.event.pull_request.base.sha }} ${GITHUB_SHA} \

asv_bench/asv.conf.json

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
// If missing or the empty string, the tool will be automatically
3030
// determined by looking for tools on the PATH environment
3131
// variable.
32-
"environment_type": "conda",
32+
"environment_type": "mamba",
3333
"conda_channels": ["conda-forge"],
3434

3535
// timeout in seconds for installing any dependencies in environment
@@ -41,7 +41,7 @@
4141

4242
// The Pythons you'd like to test against. If not provided, defaults
4343
// to the current version of Python used to run `asv`.
44-
"pythons": ["3.10"],
44+
"pythons": ["3.11"],
4545

4646
// The matrix of dependencies to test. Each key is the name of a
4747
// package (in PyPI) and the values are version numbers. An empty
@@ -72,8 +72,12 @@
7272
"sparse": [""],
7373
"cftime": [""]
7474
},
75-
76-
75+
// fix for bad builds
76+
// https://github.com/airspeed-velocity/asv/issues/1389#issuecomment-2076131185
77+
"build_command": [
78+
"python -m build",
79+
"python -mpip wheel --no-deps --no-build-isolation --no-index -w {build_cache_dir} {build_dir}"
80+
],
7781
// Combinations of libraries/python versions can be excluded/included
7882
// from the set to test. Each entry is a dictionary containing additional
7983
// key-value pairs to include/exclude.

asv_bench/benchmarks/groupby.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,7 @@ def setup(self, *args, **kwargs):
6868
self.ds2d_mean = self.ds2d.groupby("b").mean().compute()
6969

7070

71+
# TODO: These don't work now because we are calling `.compute` explicitly.
7172
class GroupByPandasDataFrame(GroupBy):
7273
"""Run groupby tests using pandas DataFrame."""
7374

@@ -111,11 +112,11 @@ def setup(self, *args, **kwargs):
111112
{
112113
"b": ("time", np.arange(365.0 * 24)),
113114
},
114-
coords={"time": pd.date_range("2001-01-01", freq="H", periods=365 * 24)},
115+
coords={"time": pd.date_range("2001-01-01", freq="h", periods=365 * 24)},
115116
)
116117
self.ds2d = self.ds1d.expand_dims(z=10)
117-
self.ds1d_mean = self.ds1d.resample(time="48H").mean()
118-
self.ds2d_mean = self.ds2d.resample(time="48H").mean()
118+
self.ds1d_mean = self.ds1d.resample(time="48h").mean()
119+
self.ds2d_mean = self.ds2d.resample(time="48h").mean()
119120

120121
@parameterized(["ndim"], [(1, 2)])
121122
def time_init(self, ndim):
@@ -127,15 +128,15 @@ def time_init(self, ndim):
127128
def time_agg_small_num_groups(self, method, ndim, use_flox):
128129
ds = getattr(self, f"ds{ndim}d")
129130
with xr.set_options(use_flox=use_flox):
130-
getattr(ds.resample(time="3M"), method)().compute()
131+
getattr(ds.resample(time="3ME"), method)().compute()
131132

132133
@parameterized(
133134
["method", "ndim", "use_flox"], [("sum", "mean"), (1, 2), (True, False)]
134135
)
135136
def time_agg_large_num_groups(self, method, ndim, use_flox):
136137
ds = getattr(self, f"ds{ndim}d")
137138
with xr.set_options(use_flox=use_flox):
138-
getattr(ds.resample(time="48H"), method)().compute()
139+
getattr(ds.resample(time="48h"), method)().compute()
139140

140141

141142
class ResampleDask(Resample):
@@ -154,13 +155,13 @@ def setup(self, *args, **kwargs):
154155
},
155156
coords={
156157
"time": xr.date_range(
157-
"2001-01-01", freq="H", periods=365 * 24, calendar="noleap"
158+
"2001-01-01", freq="h", periods=365 * 24, calendar="noleap"
158159
)
159160
},
160161
)
161162
self.ds2d = self.ds1d.expand_dims(z=10)
162-
self.ds1d_mean = self.ds1d.resample(time="48H").mean()
163-
self.ds2d_mean = self.ds2d.resample(time="48H").mean()
163+
self.ds1d_mean = self.ds1d.resample(time="48h").mean()
164+
self.ds2d_mean = self.ds2d.resample(time="48h").mean()
164165

165166

166167
@parameterized(["use_cftime", "use_flox"], [[True, False], [True, False]])

asv_bench/benchmarks/indexing.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
}
2020

2121
basic_assignment_values = {
22+
"1scalar": 0,
2223
"1slice": xr.DataArray(randn((3, ny), frac_nan=0.1), dims=["x", "y"]),
2324
"1slice-1scalar": xr.DataArray(randn(int(ny / 3) + 1, frac_nan=0.1), dims=["y"]),
2425
"2slicess-1scalar": xr.DataArray(

0 commit comments

Comments (0)