
Commit a0beafc

Add an option to skip thread-unsafe tests
1 parent 120db6e commit a0beafc

3 files changed: 55 additions & 8 deletions


README.md

Lines changed: 9 additions & 1 deletion
@@ -61,9 +61,11 @@ those fixtures are shared between threads.
 
 ## Features
 
-- Two global CLI flags:
+- Three global CLI flags:
   - `--parallel-threads` to run a test suite in parallel
   - `--iterations` to run multiple times in each thread
+  - `--skip-thread-unsafe` to skip running tests marked as or
+    detected to be thread-unsafe.
 
 - Three corresponding markers:
   - `pytest.mark.parallel_threads(n)` to mark a single test to run
@@ -239,6 +241,12 @@ def test_skip_if_parallel(num_parallel_threads):
     ...
 ```
 
+You can skip tests marked as or detected to be thread-unsafe by passing
+`--skip-thread-unsafe` in your pytest invocation. This is useful when running
+pytest-run-parallel under [Thread Sanitizer](). Setting
+`--skip-thread-unsafe=True` will avoid unnecessarily running tests where thread
+sanitizer cannot detect races because the test is not parallelized.
+
 Finally, the `thread_comp` fixture allows for parallel test debugging,
 by providing an instance of `ThreadComparator`, whose `__call__` method
 allows to check if all the values produced by all threads during an
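To make the README addition concrete, here is a hypothetical test module (not part of this commit) run the way the new text describes; the `pytest.mark.thread_unsafe` marker and its `reason` keyword are assumed from the plugin code in the next file.

```python
# test_example.py -- hypothetical module illustrating --skip-thread-unsafe
import pytest


@pytest.mark.thread_unsafe(reason="mutates module-level state")
def test_increments_global_counter():
    # normally pinned to a single thread; skipped outright when the flag is set
    ...


def test_pure_computation():
    # thread-safe test; still runs in every worker thread
    assert sum(range(10)) == 45
```

With `pytest --parallel-threads=10 --skip-thread-unsafe=True`, the first test should be reported as skipped with a `Thread unsafe: ...` reason, while the second still runs across all ten threads.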

src/pytest_run_parallel/plugin.py

Lines changed: 24 additions & 7 deletions
@@ -32,6 +32,14 @@ def pytest_addoption(parser):
         type=int,
         help="Set the number of threads used to execute each test concurrently.",
     )
+    parser.addoption(
+        "--skip-thread-unsafe",
+        action="store",
+        dest="skip_thread_unsafe",
+        help="Whether to skip running thread-unsafe tests",
+        type=bool,
+        default=False,
+    )
     parser.addini(
         "thread_unsafe_fixtures",
         "list of thread-unsafe fixture names that cause a test to "
@@ -145,6 +153,8 @@ def pytest_itemcollected(item):
     fixtures = getattr(item, "fixturenames", ())
 
     n_iterations = item.config.option.iterations
+    skip_thread_unsafe = item.config.option.skip_thread_unsafe
+
     m = item.get_closest_marker("iterations")
     if m is not None:
         n_iterations = int(m.args[0])
@@ -153,13 +163,14 @@
     if n_workers > 1 and m is not None:
         n_workers = 1
         reason = m.kwargs.get("reason", None)
-        if reason is not None:
-            item.user_properties.append(("thread_unsafe_reason", reason))
+        if reason is None:
+            reason = "uses thread_unsafe marker"
+        item.user_properties.append(("thread_unsafe_reason", reason))
+        if skip_thread_unsafe:
+            item.add_marker(pytest.mark.skip(
+                reason=f"Thread unsafe: {reason}"))
         else:
-            item.user_properties.append(
-                ("thread_unsafe_reason", "uses thread_unsafe marker")
-            )
-        item.add_marker(pytest.mark.parallel_threads(1))
+            item.add_marker(pytest.mark.parallel_threads(1))
 
     if not hasattr(item, "obj"):
         if hasattr(item, "_parallel_custom_item"):
@@ -183,14 +194,20 @@ def pytest_itemcollected(item):
     ]
     skipped_functions = frozenset((".".join(x[:-1]), x[-1]) for x in skipped_functions)
 
+    skip_thread_unsafe = item.config.option.skip_thread_unsafe
+
     if n_workers > 1:
         thread_unsafe, thread_unsafe_reason = identify_thread_unsafe_nodes(
            item.obj, skipped_functions
        )
        if thread_unsafe:
            n_workers = 1
            item.user_properties.append(("thread_unsafe_reason", thread_unsafe_reason))
-            item.add_marker(pytest.mark.parallel_threads(1))
+            if skip_thread_unsafe:
+                item.add_marker(pytest.mark.skip(
+                    reason=f"Thread unsafe: {thread_unsafe_reason}"))
+            else:
+                item.add_marker(pytest.mark.parallel_threads(1))
 
     unsafe_fixtures = _thread_unsafe_fixtures | set(
         item.config.getini("thread_unsafe_fixtures")
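For readers less familiar with the hooks touched above, here is a minimal standalone sketch of the same collection-time pattern; the option and marker names are illustrative, not pytest-run-parallel's, and the real plugin additionally pins non-skipped thread-unsafe tests to one thread via `pytest.mark.parallel_threads(1)`.

```python
# conftest.py -- sketch of the skip-at-collection pattern used in the patch;
# "--skip-unsafe-demo" and the "unsafe" marker are made up for illustration.
import pytest


def pytest_addoption(parser):
    # a plain on/off flag; the patch above uses action="store" with type=bool
    # so the option can be written as --skip-thread-unsafe=True
    parser.addoption(
        "--skip-unsafe-demo",
        action="store_true",
        dest="skip_unsafe_demo",
        default=False,
        help="Skip tests carrying the demo 'unsafe' marker",
    )


def pytest_itemcollected(item):
    # called once per collected test item, before the session runs
    m = item.get_closest_marker("unsafe")
    if m is None:
        return
    reason = m.kwargs.get("reason", "uses unsafe marker")
    item.user_properties.append(("unsafe_reason", reason))
    if item.config.option.skip_unsafe_demo:
        # same move as the patch: replace the test run with an explicit skip
        item.add_marker(pytest.mark.skip(reason=f"Thread unsafe: {reason}"))
```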

tests/test_run_parallel.py

Lines changed: 22 additions & 0 deletions
@@ -589,6 +589,16 @@ def test_should_run_single_2(num_parallel_threads):
         ]
     )
 
+    # check that skipping works too
+    result = pytester.runpytest(
+        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v")
+
+    result.stdout.fnmatch_lines([
+        "*::test_should_run_single SKIPPED*",
+        "*::test_should_run_single_2 SKIPPED*"
+        ]
+    )
+
 
 def test_pytest_warns_detection(pytester):
     # create a temporary pytest test module
@@ -636,6 +646,18 @@ def test_single_thread_warns_4(num_parallel_threads):
         ]
     )
 
+    # check that skipping works too
+    result = pytester.runpytest(
+        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v")
+
+    result.stdout.fnmatch_lines(
+        [
+            "*::test_single_thread_warns_1 SKIPPED*",
+            "*::test_single_thread_warns_2 SKIPPED*",
+            "*::test_single_thread_warns_3 SKIPPED*",
+            "*::test_single_thread_warns_4 SKIPPED*",
+        ]
+    )
 
 @pytest.mark.skipif(psutil is None, reason="psutil needs to be installed")
 def test_auto_detect_cpus_psutil_affinity(
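As a rough companion sketch (not part of the commit), an end-to-end check of the new flag against an explicitly marked test might look as follows; it assumes the plugin's `thread_unsafe` marker triggers the same skip path as the detected cases above.

```python
# hypothetical pytester-based check of --skip-thread-unsafe
pytest_plugins = ["pytester"]


def test_skip_thread_unsafe_flag(pytester):
    pytester.makepyfile(
        """
        import pytest

        @pytest.mark.thread_unsafe
        def test_marked_unsafe():
            pass

        def test_regular():
            pass
        """
    )
    result = pytester.runpytest(
        "--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
    )
    # the marked test is skipped, the thread-safe one still runs
    result.stdout.fnmatch_lines(["*::test_marked_unsafe SKIPPED*"])
    result.assert_outcomes(passed=1, skipped=1)
```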
