diff --git a/README.md b/README.md
index 4e2eb94..cb1b3fc 100644
--- a/README.md
+++ b/README.md
@@ -61,9 +61,11 @@ those fixtures are shared between threads.
 
 ## Features
 
-- Two global CLI flags:
+- Three global CLI flags:
   - `--parallel-threads` to run a test suite in parallel
   - `--iterations` to run multiple times in each thread
+  - `--skip-thread-unsafe` to skip running tests marked as or
+    detected to be thread-unsafe.
 
 - Three corresponding markers:
   - `pytest.mark.parallel_threads(n)` to mark a single test to run
@@ -239,6 +241,13 @@ def test_skip_if_parallel(num_parallel_threads):
     ...
 ```
 
+You can skip tests marked as or detected to be thread-unsafe by passing
+`--skip-thread-unsafe` in your pytest invocation. This is useful when running
+pytest-run-parallel under [Thread
+Sanitizer](https://clang.llvm.org/docs/ThreadSanitizer.html). Setting
+`--skip-thread-unsafe=True` will avoid unnecessarily running tests where thread
+sanitizer cannot detect races because the test is not parallelized.
+
 Finally, the `thread_comp` fixture allows for parallel test debugging, by
 providing an instance of `ThreadComparator`, whose `__call__` method allows
 to check if all the values produced by all threads during an
diff --git a/src/pytest_run_parallel/plugin.py b/src/pytest_run_parallel/plugin.py
index 206b5a9..35020bb 100644
--- a/src/pytest_run_parallel/plugin.py
+++ b/src/pytest_run_parallel/plugin.py
@@ -32,6 +32,14 @@ def pytest_addoption(parser):
         type=int,
         help="Set the number of threads used to execute each test concurrently.",
     )
+    parser.addoption(
+        "--skip-thread-unsafe",
+        action="store",
+        dest="skip_thread_unsafe",
+        help="Whether to skip running thread-unsafe tests",
+        type=bool,
+        default=False,
+    )
     parser.addini(
         "thread_unsafe_fixtures",
         "list of thread-unsafe fixture names that cause a test to "
@@ -145,6 +153,8 @@ def pytest_itemcollected(item):
     fixtures = getattr(item, "fixturenames", ())
     n_iterations = item.config.option.iterations
 
+    skip_thread_unsafe = item.config.option.skip_thread_unsafe
+
     m = item.get_closest_marker("iterations")
     if m is not None:
         n_iterations = int(m.args[0])
@@ -153,13 +163,13 @@
     if n_workers > 1 and m is not None:
         n_workers = 1
         reason = m.kwargs.get("reason", None)
-        if reason is not None:
-            item.user_properties.append(("thread_unsafe_reason", reason))
+        if reason is None:
+            reason = "uses thread_unsafe marker"
+        item.user_properties.append(("thread_unsafe_reason", reason))
+        if skip_thread_unsafe:
+            item.add_marker(pytest.mark.skip(reason=f"Thread unsafe: {reason}"))
         else:
-            item.user_properties.append(
-                ("thread_unsafe_reason", "uses thread_unsafe marker")
-            )
-        item.add_marker(pytest.mark.parallel_threads(1))
+            item.add_marker(pytest.mark.parallel_threads(1))
 
     if not hasattr(item, "obj"):
         if hasattr(item, "_parallel_custom_item"):
@@ -190,7 +200,12 @@
         if thread_unsafe:
             n_workers = 1
             item.user_properties.append(("thread_unsafe_reason", thread_unsafe_reason))
-            item.add_marker(pytest.mark.parallel_threads(1))
+            if skip_thread_unsafe:
+                item.add_marker(
+                    pytest.mark.skip(reason=f"Thread unsafe: {thread_unsafe_reason}")
+                )
+            else:
+                item.add_marker(pytest.mark.parallel_threads(1))
 
     unsafe_fixtures = _thread_unsafe_fixtures | set(
         item.config.getini("thread_unsafe_fixtures")
diff --git a/tests/test_run_parallel.py b/tests/test_run_parallel.py
index d8c16cd..ea51386 100644
--- a/tests/test_run_parallel.py
+++ b/tests/test_run_parallel.py
@@ -589,6 +589,15 @@ def test_should_run_single_2(num_parallel_threads):
         ]
     )
 
+    # check that skipping works too
+    result = pytester.runpytest(
+        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
+    )
+
+    result.stdout.fnmatch_lines(
+        ["*::test_should_run_single SKIPPED*", "*::test_should_run_single_2 SKIPPED*"]
+    )
+
 
 def test_pytest_warns_detection(pytester):
     # create a temporary pytest test module
@@ -636,6 +645,20 @@ def test_single_thread_warns_4(num_parallel_threads):
         ]
     )
 
+    # check that skipping works too
+    result = pytester.runpytest(
+        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
+    )
+
+    result.stdout.fnmatch_lines(
+        [
+            "*::test_single_thread_warns_1 SKIPPED*",
+            "*::test_single_thread_warns_2 SKIPPED*",
+            "*::test_single_thread_warns_3 SKIPPED*",
+            "*::test_single_thread_warns_4 SKIPPED*",
+        ]
+    )
+
 
 @pytest.mark.skipif(psutil is None, reason="psutil needs to be installed")
 def test_auto_detect_cpus_psutil_affinity(