Commit aeed33d

pb8ozulinx86 authored and committed

test: default to not honor the performance baseline checks

But add an option to check them.

Signed-off-by: Pablo Barbáchano <pablob@amazon.com>

1 parent: 4022f53

4 files changed, +14 -3 lines

tests/conftest.py

Lines changed: 9 additions & 0 deletions

@@ -125,6 +125,15 @@ def test_with_any_microvm(test_microvm_any):
 METRICS = get_metrics_logger()


+def pytest_addoption(parser):
+    """Pytest hook. Add command line options."""
+    parser.addoption(
+        "--perf-fail",
+        action="store_true",
+        help="fail the test if the baseline does not match",
+    )
+
+
 @pytest.fixture(scope="function", autouse=True)
 def record_props(request, record_property):
     """Decorate test results with additional properties.

tests/framework/stats/consumer.py

Lines changed: 2 additions & 2 deletions

@@ -100,7 +100,7 @@ def _reset(self):
         """Reset the results of this consumer, used in a previous exercise."""
         self._results = defaultdict()

-    def process(self, fail_fast=False) -> (dict, dict):
+    def process(self, check=True, fail_fast=False) -> (dict, dict):
         """Generate statistics as a dictionary."""
         self._validate()
         for ms_name in self._results:
@@ -127,7 +127,7 @@ def process(self, fail_fast=False) -> (dict, dict):
            }

            pass_criteria = st_def.pass_criteria
-           if pass_criteria:
+           if check and pass_criteria:
                # if the statistic definition contains a criteria but the
                # corresponding baseline is not defined, the test should fail.
                if pass_criteria.baseline == {}:
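
The new check parameter means pass criteria are only evaluated when the caller asks for it; the statistics themselves are still computed and returned either way. A simplified, self-contained sketch of that gating logic (not the real Consumer class; the names and the baseline format are illustrative):

def summarize(results, pass_criteria=None, check=True):
    """Compute per-metric means; verify them against a baseline only if check is True."""
    stats = {name: sum(values) / len(values) for name, values in results.items()}
    if check and pass_criteria:
        for name, (low, high) in pass_criteria.items():
            # the baseline here is assumed to be a (low, high) interval
            assert low <= stats[name] <= high, f"{name}={stats[name]} outside baseline"
    return stats


# With check=False the baseline comparison is skipped entirely:
print(summarize({"latency_ms": [3, 5, 4]}, pass_criteria={"latency_ms": (0, 2)}, check=False))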

tests/framework/stats/core.py

Lines changed: 2 additions & 1 deletion

@@ -53,6 +53,7 @@ def __init__(self, name="<PLACEHOLDER>", iterations=1, custom={}):
         self._failure_aggregator = CoreException()
         self.metrics_test = None
         self.metrics = None
+        self.check_baseline = True

     def add_pipe(self, producer: Producer, consumer: Consumer, tag=None):
         """Add a new producer-consumer pipe."""
@@ -83,7 +84,7 @@ def run_exercise(self, fail_fast=False) -> Result:
            self.metrics.flush()

            try:
-               stats, custom = pipe.consumer.process(fail_fast)
+               stats, custom = pipe.consumer.process(check=self.check_baseline)
            except (ProcessingException, AssertionError) as err:
                self._failure_aggregator.add_row(f"Failed on '{tag}':")
                self._failure_aggregator.add_row(err)
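
check_baseline lives on the Core object and defaults to True, so existing callers of run_exercise() keep the old, strict behaviour; only code that explicitly flips the attribute opts out. A standalone sketch of that pattern (MiniCore and MiniConsumer are hypothetical stand-ins, not the framework classes):

class MiniConsumer:
    def process(self, check=True):
        # stand-in for Consumer.process(); just reports whether checks ran
        return {"baseline_checked": check}


class MiniCore:
    def __init__(self):
        self.check_baseline = True  # strict by default, like the real Core

    def run_exercise(self, consumer):
        return consumer.process(check=self.check_baseline)


core_obj = MiniCore()
core_obj.check_baseline = False  # what st_core does when --perf-fail is absent
print(core_obj.run_exercise(MiniConsumer()))  # -> {'baseline_checked': False}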

tests/integration_tests/performance/conftest.py

Lines changed: 1 addition & 0 deletions

@@ -85,6 +85,7 @@ def send_metrics(metrics, stats: core.Core):
 def st_core(metrics, results_file_dumper, guest_kernel, rootfs, request):
     """Helper fixture to dump results and publish metrics"""
     stats = core.Core()
+    stats.check_baseline = request.config.getoption("--perf-fail")
     stats.iterations = 1
     stats.custom = {
         "instance": global_props.instance,
