
Add an option to skip thread-unsafe tests #74

Merged · 4 commits · Jun 10, 2025
Changes from 2 commits
README.md (10 changes: 9 additions & 1 deletion)
@@ -61,9 +61,11 @@ those fixtures are shared between threads.

## Features

-- Two global CLI flags:
+- Three global CLI flags:
  - `--parallel-threads` to run a test suite in parallel
  - `--iterations` to run multiple times in each thread
+  - `--skip-thread-unsafe` to skip running tests marked as or
+    detected to be thread-unsafe.

- Three corresponding markers:
  - `pytest.mark.parallel_threads(n)` to mark a single test to run
@@ -239,6 +241,12 @@ def test_skip_if_parallel(num_parallel_threads):
...
```

+You can skip tests marked as or detected to be thread-unsafe by passing
+`--skip-thread-unsafe` in your pytest invocation. This is useful when running
+pytest-run-parallel under [Thread Sanitizer](). Setting
+`--skip-thread-unsafe=True` will avoid unnecessarily running tests where Thread
+Sanitizer cannot detect races because the test is not parallelized.

Finally, the `thread_comp` fixture allows for parallel test debugging,
by providing an instance of `ThreadComparator`, whose `__call__` method
allows to check if all the values produced by all threads during an
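For readers trying the feature out, the flags compose like this. A minimal sketch using pytest's public `pytest.main` entry point, with the flag values taken from the README diff above; the suite it runs against is whatever directory you invoke it from:

```python
import pytest

# Equivalent to the CLI invocation:
#   pytest --parallel-threads=4 --skip-thread-unsafe=True -v
# Tests marked as or detected to be thread-unsafe are reported as
# SKIPPED instead of being demoted to a single thread.
raise SystemExit(
    pytest.main(["--parallel-threads=4", "--skip-thread-unsafe=True", "-v"])
)
```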
src/pytest_run_parallel/plugin.py (31 changes: 24 additions & 7 deletions)
@@ -32,6 +32,14 @@ def pytest_addoption(parser):
        type=int,
        help="Set the number of threads used to execute each test concurrently.",
    )
+    parser.addoption(
+        "--skip-thread-unsafe",
+        action="store",
+        dest="skip_thread_unsafe",
+        help="Whether to skip running thread-unsafe tests",
+        type=bool,
+        default=False,
+    )
    parser.addini(
        "thread_unsafe_fixtures",
        "list of thread-unsafe fixture names that cause a test to "
@@ -145,6 +153,8 @@ def pytest_itemcollected(item):
    fixtures = getattr(item, "fixturenames", ())

    n_iterations = item.config.option.iterations
+    skip_thread_unsafe = item.config.option.skip_thread_unsafe
+
    m = item.get_closest_marker("iterations")
    if m is not None:
        n_iterations = int(m.args[0])
@@ -153,13 +163,13 @@
    if n_workers > 1 and m is not None:
        n_workers = 1
        reason = m.kwargs.get("reason", None)
-        if reason is not None:
-            item.user_properties.append(("thread_unsafe_reason", reason))
+        if reason is None:
+            reason = "uses thread_unsafe marker"
+        item.user_properties.append(("thread_unsafe_reason", reason))
+        if skip_thread_unsafe:
+            item.add_marker(pytest.mark.skip(reason=f"Thread unsafe: {reason}"))
        else:
-            item.user_properties.append(
-                ("thread_unsafe_reason", "uses thread_unsafe marker")
-            )
-        item.add_marker(pytest.mark.parallel_threads(1))
+            item.add_marker(pytest.mark.parallel_threads(1))

    if not hasattr(item, "obj"):
        if hasattr(item, "_parallel_custom_item"):
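To make the marker branch concrete, here is a hypothetical test it would affect; the `thread_unsafe` marker name is inferred from the "uses thread_unsafe marker" reason string in the hunk above:

```python
import pytest

@pytest.mark.thread_unsafe(reason="mutates module-level state")
def test_increments_global_counter():
    # Without --skip-thread-unsafe, this test is demoted to one thread via
    # pytest.mark.parallel_threads(1); with --skip-thread-unsafe=True it is
    # skipped with reason "Thread unsafe: mutates module-level state".
    ...
```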
@@ -183,14 +193,21 @@
    ]
    skipped_functions = frozenset((".".join(x[:-1]), x[-1]) for x in skipped_functions)

+    skip_thread_unsafe = item.config.option.skip_thread_unsafe
+
[Review comment · Member, on the added line above] This is not needed, right?
    if n_workers > 1:
        thread_unsafe, thread_unsafe_reason = identify_thread_unsafe_nodes(
            item.obj, skipped_functions
        )
        if thread_unsafe:
            n_workers = 1
            item.user_properties.append(("thread_unsafe_reason", thread_unsafe_reason))
-            item.add_marker(pytest.mark.parallel_threads(1))
+            if skip_thread_unsafe:
+                item.add_marker(
+                    pytest.mark.skip(reason=f"Thread unsafe: {thread_unsafe_reason}")
+                )
+            else:
+                item.add_marker(pytest.mark.parallel_threads(1))

    unsafe_fixtures = _thread_unsafe_fixtures | set(
        item.config.getini("thread_unsafe_fixtures")
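The detection branch covers tests with no marker at all. A sketch, assuming, as the warns-detection tests below exercise, that `pytest.warns` usage is one of the patterns `identify_thread_unsafe_nodes` flags:

```python
import warnings

import pytest

def test_emits_user_warning():
    # No thread_unsafe marker here: the plugin detects the pytest.warns call
    # and, under --skip-thread-unsafe=True, skips the test instead of
    # serializing it to a single thread.
    with pytest.warns(UserWarning):
        warnings.warn("not safe to assert on concurrently", UserWarning)
```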
tests/test_run_parallel.py (23 changes: 23 additions & 0 deletions)
@@ -589,6 +589,15 @@ def test_should_run_single_2(num_parallel_threads):
        ]
    )

+    # check that skipping works too
+    result = pytester.runpytest(
+        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
+    )
+
+    result.stdout.fnmatch_lines(
+        ["*::test_should_run_single SKIPPED*", "*::test_should_run_single_2 SKIPPED*"]
+    )


def test_pytest_warns_detection(pytester):
    # create a temporary pytest test module
@@ -636,6 +645,20 @@ def test_single_thread_warns_4(num_parallel_threads):
        ]
    )

+    # check that skipping works too
+    result = pytester.runpytest(
+        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
+    )
+
+    result.stdout.fnmatch_lines(
+        [
+            "*::test_single_thread_warns_1 SKIPPED*",
+            "*::test_single_thread_warns_2 SKIPPED*",
+            "*::test_single_thread_warns_3 SKIPPED*",
+            "*::test_single_thread_warns_4 SKIPPED*",
+        ]
+    )
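As a usage note, the skip reasons these tests assert on can also be surfaced on the command line with pytest's standard `-rs` reporting flag; a hypothetical invocation:

```python
import pytest

# -rs prints a summary line per skipped test, including the
# "Thread unsafe: <reason>" message set by the plugin.
pytest.main(["--parallel-threads=10", "--skip-thread-unsafe=True", "-rs"])
```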


@pytest.mark.skipif(psutil is None, reason="psutil needs to be installed")
def test_auto_detect_cpus_psutil_affinity(