-
Notifications
You must be signed in to change notification settings - Fork 539
Closed
Description
I observed this on commit 9fcf0ffe28db6ffe396fb13dbf01cbb925674187, but not on the latest published version of rayon-core.
To reproduce:
$ git clone git@github.com:rust-lang/rayon
$ cd rayon/rayon-core
$ MIRIFLAGS="-Zmiri-disable-isolation" cargo +miri miri test mutual_install
I then see:
test thread_pool::test::mutual_install ... warning: thread support is experimental and incomplete: weak memory effects are not emulated.
error: Undefined Behavior: Data race detected between Deallocate on Thread(id = 2) and Read on Thread(id = 1) at alloc167539 (current vector clock = VClock([47, 428, 824]), conflicting timestamp = VClock([0, 429, 824]))
--> rayon-core/src/registry.rs:506:5
|
506 | }
| ^ Data race detected between Deallocate on Thread(id = 2) and Read on Thread(id = 1) at alloc167539 (current vector clock = VClock([47, 428, 824]), conflicting timestamp = VClock([0, 429, 824]))
|
= help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior
= help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information
full error with backtrace
test thread_pool::test::mutual_install ... warning: thread support is experimental and incomplete: weak memory effects are not emulated.
error: Undefined Behavior: Data race detected between Deallocate on Thread(id = 2) and Read on Thread(id = 1) at alloc167539 (current vector clock = VClock([47, 428, 824]), conflicting timestamp = VClock([0, 429, 824]))
--> rayon-core/src/registry.rs:506:5
|
506 | }
| ^ Data race detected between Deallocate on Thread(id = 2) and Read on Thread(id = 1) at alloc167539 (current vector clock = VClock([47, 428, 824]), conflicting timestamp = VClock([0, 429, 824]))
|
= help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior
= help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information
= note: inside `registry::Registry::in_worker_cross::<[closure@rayon-core/src/thread_pool/mod.rs:110:33: 110:44], bool>` at rayon-core/src/registry.rs:506:5
note: inside `registry::Registry::in_worker::<[closure@rayon-core/src/thread_pool/mod.rs:110:33: 110:44], bool>` at rayon-core/src/registry.rs:446:17
--> rayon-core/src/registry.rs:446:17
|
446 | self.in_worker_cross(&*worker_thread, op)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `thread_pool::ThreadPool::install::<[closure@rayon-core/src/thread_pool/test.rs:163:27: 167:14], bool>` at rayon-core/src/thread_pool/mod.rs:110:9
--> rayon-core/src/thread_pool/mod.rs:110:9
|
110 | self.registry.in_worker(|_, _| op())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside closure at rayon-core/src/thread_pool/test.rs:163:13
--> rayon-core/src/thread_pool/test.rs:163:13
|
163 | / pool1.install(|| {
164 | | // If they blocked on inter-pool installs, there would be no
165 | | // threads left to run this!
166 | | true
167 | | })
| |______________^
note: inside closure at rayon-core/src/thread_pool/mod.rs:110:40
--> rayon-core/src/thread_pool/mod.rs:110:40
|
110 | self.registry.in_worker(|_, _| op())
| ^^^^
note: inside closure at rayon-core/src/registry.rs:499:17
--> rayon-core/src/registry.rs:499:17
|
499 | op(&*worker_thread, true)
| ^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside closure at rayon-core/src/job.rs:116:21
--> rayon-core/src/job.rs:116:21
|
116 | move || func(true)
| ^^^^^^^^^^
= note: inside `<std::panic::AssertUnwindSafe<[closure@rayon-core/src/job.rs:116:13: 116:31]> as std::ops::FnOnce<()>>::call_once` at rustc_src/src/panic/unwind_safe.rs:271:9
= note: inside `std::panicking::r#try::do_call::<std::panic::AssertUnwindSafe<[closure@rayon-core/src/job.rs:116:13: 116:31]>, bool>` at rustc_src/src/panicking.rs:492:40
= note: inside `std::panicking::r#try::<bool, std::panic::AssertUnwindSafe<[closure@rayon-core/src/job.rs:116:13: 116:31]>>` at rustc_src/src/panicking.rs:456:19
= note: inside `std::panic::catch_unwind::<std::panic::AssertUnwindSafe<[closure@rayon-core/src/job.rs:116:13: 116:31]>, bool>` at rustc_src/src/panic.rs:137:14
note: inside `unwind::halt_unwinding::<[closure@rayon-core/src/job.rs:116:13: 116:31], bool>` at rayon-core/src/unwind.rs:17:5
--> rayon-core/src/unwind.rs:17:5
|
17 | panic::catch_unwind(AssertUnwindSafe(func))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `<job::StackJob<latch::TickleLatch<latch::SpinLatch>, [closure@rayon-core/src/registry.rs:496:13: 500:14], bool> as job::Job>::execute` at rayon-core/src/job.rs:123:38
--> rayon-core/src/job.rs:123:38
|
123 | (*this.result.get()) = match unwind::halt_unwinding(call(func)) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `job::JobRef::execute` at rayon-core/src/job.rs:60:9
--> rayon-core/src/job.rs:60:9
|
60 | (self.execute_fn)(self.pointer)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `registry::WorkerThread::execute` at rayon-core/src/registry.rs:730:9
--> rayon-core/src/registry.rs:730:9
|
730 | job.execute();
| ^^^^^^^^^^^^^
note: inside `registry::WorkerThread::wait_until_cold::<latch::CountLatch>` at rayon-core/src/registry.rs:711:17
--> rayon-core/src/registry.rs:711:17
|
711 | self.execute(job);
| ^^^^^^^^^^^^^^^^^
note: inside `registry::WorkerThread::wait_until::<latch::CountLatch>` at rayon-core/src/registry.rs:685:13
--> rayon-core/src/registry.rs:685:13
|
685 | self.wait_until_cold(latch);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `registry::main_loop` at rayon-core/src/registry.rs:809:5
--> rayon-core/src/registry.rs:809:5
|
809 | worker_thread.wait_until(&registry.terminate_latch);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `registry::ThreadBuilder::run` at rayon-core/src/registry.rs:56:18
--> rayon-core/src/registry.rs:56:18
|
56 | unsafe { main_loop(self.worker, self.registry, self.index) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside closure at rayon-core/src/registry.rs:101:20
--> rayon-core/src/registry.rs:101:20
|
101 | b.spawn(|| thread.run())?;
| ^^^^^^^^^^^^
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
error: aborting due to previous error; 32 warnings emitted
error: test failed, to rerun pass '--lib'
Metadata
Metadata
Assignees
Labels
No labels