From 09919c2b596b19ad36850b77d8f6ea00b3b60612 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 7 Nov 2018 14:56:25 +0100 Subject: [PATCH 01/15] Retag is the only operation that generates new tags --- .gitignore | 1 - src/fn_call.rs | 2 +- src/intrinsic.rs | 22 +-- src/lib.rs | 58 ++++---- src/stacked_borrows.rs | 127 +++++++++++------- .../stacked_borrows/buggy_as_mut_slice.rs | 8 +- .../stacked_borrows/buggy_split_at_mut.rs | 2 +- .../static_memory_modification.rs | 3 + .../stacked_borrows/transmute-is-no-escape.rs | 12 ++ .../stacked_borrows/unescaped_local.rs | 8 +- tests/run-pass/stacked-borrows.rs | 20 ++- 11 files changed, 161 insertions(+), 102 deletions(-) create mode 100644 tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs diff --git a/.gitignore b/.gitignore index dcca7ec10a..ca23de4208 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,5 @@ target /doc tex/*/out *.dot -*.mir *.rs.bk Cargo.lock diff --git a/src/fn_call.rs b/src/fn_call.rs index d9bf049df7..150cf7402a 100644 --- a/src/fn_call.rs +++ b/src/fn_call.rs @@ -555,7 +555,7 @@ impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx, 'mir> for super::MiriEvalCo } "pthread_attr_getstack" => { // second argument is where we are supposed to write the stack size - let ptr = self.ref_to_mplace(self.read_immediate(args[1])?)?; + let ptr = self.deref_operand(args[1])?; let stackaddr = Scalar::from_int(0x80000, args[1].layout.size); // just any address self.write_scalar(stackaddr, ptr.into())?; // return 0 diff --git a/src/intrinsic.rs b/src/intrinsic.rs index 20402f4a23..e23cadfcaf 100644 --- a/src/intrinsic.rs +++ b/src/intrinsic.rs @@ -59,7 +59,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' "atomic_load_relaxed" | "atomic_load_acq" | "volatile_load" => { - let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let ptr = self.deref_operand(args[0])?; let val = self.read_scalar(ptr.into())?; // make sure it fits into a scalar; otherwise it cannot 
be atomic self.write_scalar(val, dest)?; } @@ -68,7 +68,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' "atomic_store_relaxed" | "atomic_store_rel" | "volatile_store" => { - let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let ptr = self.deref_operand(args[0])?; let val = self.read_scalar(args[1])?; // make sure it fits into a scalar; otherwise it cannot be atomic self.write_scalar(val, ptr.into())?; } @@ -78,7 +78,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' } _ if intrinsic_name.starts_with("atomic_xchg") => { - let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let ptr = self.deref_operand(args[0])?; let new = self.read_scalar(args[1])?; let old = self.read_scalar(ptr.into())?; self.write_scalar(old, dest)?; // old value is returned @@ -86,10 +86,10 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' } _ if intrinsic_name.starts_with("atomic_cxchg") => { - let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?; - let expect_old = self.read_immediate(args[1])?; // read as value for the sake of `binary_op_imm()` + let ptr = self.deref_operand(args[0])?; + let expect_old = self.read_immediate(args[1])?; // read as immediate for the sake of `binary_op_imm()` let new = self.read_scalar(args[2])?; - let old = self.read_immediate(ptr.into())?; // read as value for the sake of `binary_op_imm()` + let old = self.read_immediate(ptr.into())?; // read as immediate for the sake of `binary_op_imm()` // binary_op_imm will bail if either of them is not a scalar let (eq, _) = self.binary_op_imm(mir::BinOp::Eq, old, expect_old)?; let res = Immediate::ScalarPair(old.to_scalar_or_undef(), eq.into()); @@ -125,7 +125,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' "atomic_xsub_rel" | "atomic_xsub_acqrel" | "atomic_xsub_relaxed" => { - let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let 
ptr = self.deref_operand(args[0])?; if !ptr.layout.ty.is_integral() { return err!(Unimplemented(format!("Atomic arithmetic operations only work on integer types"))); } @@ -167,7 +167,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' } "discriminant_value" => { - let place = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let place = self.deref_operand(args[0])?; let discr_val = self.read_discriminant(place.into())?.0; self.write_scalar(Scalar::from_uint(discr_val, dest.layout.size), dest)?; } @@ -279,7 +279,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' } "move_val_init" => { - let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let ptr = self.deref_operand(args[0])?; self.copy_op(args[1], ptr.into())?; } @@ -347,7 +347,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' } "size_of_val" => { - let mplace = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let mplace = self.deref_operand(args[0])?; let (size, _) = self.size_and_align_of_mplace(mplace)? .expect("size_of_val called on extern type"); let ptr_size = self.pointer_size(); @@ -359,7 +359,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' "min_align_of_val" | "align_of_val" => { - let mplace = self.ref_to_mplace(self.read_immediate(args[0])?)?; + let mplace = self.deref_operand(args[0])?; let (_, align) = self.size_and_align_of_mplace(mplace)? 
.expect("size_of_val called on extern type"); let ptr_size = self.pointer_size(); diff --git a/src/lib.rs b/src/lib.rs index b7deb8ee11..e8691409b1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -296,7 +296,6 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { type AllocExtra = stacked_borrows::Stacks; type PointerTag = Borrow; - const ENABLE_PTR_TRACKING_HOOKS: bool = true; type MemoryMap = MonoHashMap, Allocation)>; @@ -446,26 +445,6 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { Cow::Owned(alloc) } - #[inline(always)] - fn tag_reference( - ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, - place: MPlaceTy<'tcx, Borrow>, - mutability: Option, - ) -> EvalResult<'tcx, Scalar> { - let (size, _) = ecx.size_and_align_of_mplace(place)? - // for extern types, just cover what we can - .unwrap_or_else(|| place.layout.size_and_align()); - if !ecx.machine.validate || size == Size::ZERO { - // No tracking - Ok(place.ptr) - } else { - let ptr = place.ptr.to_ptr()?; - let tag = ecx.tag_reference(place, size, mutability.into())?; - Ok(Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag))) - } - } - - #[inline(always)] fn tag_dereference( ecx: &EvalContext<'a, 'mir, 'tcx, Self>, place: MPlaceTy<'tcx, Borrow>, @@ -478,7 +457,7 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { // No tracking Ok(place.ptr) } else { - let ptr = place.ptr.to_ptr()?; + let ptr = place.ptr.to_ptr()?; // assert this is not a scalar let tag = ecx.tag_dereference(place, size, mutability.into())?; Ok(Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag))) } @@ -499,6 +478,31 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { } } + #[inline] + fn escape_to_raw( + ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, + ptr: OpTy<'tcx, Self::PointerTag>, + ) -> EvalResult<'tcx> { + // It is tempting to check the type here, but drop glue does EscapeToRaw + // on a raw pointer. 
+ // This is deliberately NOT `deref_operand` as we do not want `tag_dereference` + // to be called! That would kill the original tag if we got a raw ptr. + let place = ecx.ref_to_mplace(ecx.read_immediate(ptr)?)?; + let (size, _) = ecx.size_and_align_of_mplace(place)? + // for extern types, just cover what we can + .unwrap_or_else(|| place.layout.size_and_align()); + if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || + !ecx.machine.validate || size == Size::ZERO + { + // No tracking, or no retagging. The latter is possible because a dependency of ours + // might be called with different flags than we are, so there are `Retag` + // statements but we do not want to execute them. + Ok(()) + } else { + ecx.escape_to_raw(place, size) + } + } + #[inline(always)] fn retag( ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, @@ -506,12 +510,14 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { place: PlaceTy<'tcx, Borrow>, ) -> EvalResult<'tcx> { if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || !Self::enforce_validity(ecx) { - // No tracking, or no retagging. This is possible because a dependency of ours might be - // called with different flags than we are, + // No tracking, or no retagging. The latter is possible because a dependency of ours + // might be called with different flags than we are, so there are `Retag` + // statements but we do not want to execute them. // Also, honor the whitelist in `enforce_validity` because otherwise we might retag // uninitialized data. 
- return Ok(()) + Ok(()) + } else { + ecx.retag(fn_entry, place) } - ecx.retag(fn_entry, place) } } diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index c6cd7f5005..e7c595f1c6 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -6,7 +6,7 @@ use rustc::hir; use crate::{ EvalResult, MiriEvalContext, HelpersEvalContextExt, MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra, - Pointer, PlaceTy, MPlaceTy, + Pointer, MemPlace, Scalar, Immediate, ImmTy, PlaceTy, MPlaceTy, }; pub type Timestamp = u64; @@ -395,13 +395,6 @@ impl<'tcx> Stacks { pub trait EvalContextExt<'tcx> { - fn tag_reference( - &mut self, - place: MPlaceTy<'tcx, Borrow>, - size: Size, - usage: UsageKind, - ) -> EvalResult<'tcx, Borrow>; - fn tag_dereference( &self, place: MPlaceTy<'tcx, Borrow>, @@ -415,47 +408,27 @@ pub trait EvalContextExt<'tcx> { kind: MemoryKind, ) -> Borrow; + /// Retag an indidual pointer, returning the retagged version. + fn retag_ptr( + &mut self, + ptr: ImmTy<'tcx, Borrow>, + mutbl: hir::Mutability, + ) -> EvalResult<'tcx, Immediate>; + fn retag( &mut self, fn_entry: bool, place: PlaceTy<'tcx, Borrow> ) -> EvalResult<'tcx>; -} -impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { - /// Called for place-to-value conversion. - fn tag_reference( + fn escape_to_raw( &mut self, place: MPlaceTy<'tcx, Borrow>, size: Size, - usage: UsageKind, - ) -> EvalResult<'tcx, Borrow> { - let ptr = place.ptr.to_ptr()?; - let time = self.machine.stacked_borrows.increment_clock(); - let new_bor = match usage { - UsageKind::Write => Borrow::Uniq(time), - UsageKind::Read => Borrow::Shr(Some(time)), - UsageKind::Raw => Borrow::Shr(None), - }; - trace!("tag_reference: Creating new reference ({:?}) for {:?} (pointee {}): {:?}", - usage, ptr, place.layout.ty, new_bor); - - // Update the stacks. First create the new ref as usual, then maybe freeze stuff. 
- self.memory().check_bounds(ptr, size, false)?; - let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); - alloc.extra.use_and_maybe_re_borrow(ptr, size, usage, Some(new_bor))?; - // Maybe freeze stuff - if let Borrow::Shr(Some(bor_t)) = new_bor { - self.visit_frozen(place, size, |frz_ptr, size| { - debug_assert_eq!(frz_ptr.alloc_id, ptr.alloc_id); - // Be frozen! - alloc.extra.freeze(frz_ptr, size, bor_t) - })?; - } - - Ok(new_bor) - } + ) -> EvalResult<'tcx>; +} +impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { /// Called for value-to-place conversion. /// /// Note that this does NOT mean that all this memory will actually get accessed/referenced! @@ -466,9 +439,9 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { size: Size, usage: UsageKind, ) -> EvalResult<'tcx, Borrow> { - let ptr = place.ptr.to_ptr()?; trace!("tag_dereference: Accessing reference ({:?}) for {:?} (pointee {})", - usage, ptr, place.layout.ty); + usage, place.ptr, place.layout.ty); + let ptr = place.ptr.to_ptr()?; // In principle we should not have to do anything here. However, with transmutes involved, // it can happen that the tag of `ptr` does not actually match `usage`, and we // should adjust for that. @@ -551,6 +524,50 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { Borrow::Uniq(time) } + fn retag_ptr( + &mut self, + val: ImmTy<'tcx, Borrow>, + mutbl: hir::Mutability, + ) -> EvalResult<'tcx, Immediate> { + // We want a place for where the ptr *points to*, so we get one. + let place = self.ref_to_mplace(val)?; + let size = self.size_and_align_of_mplace(place)? + .map(|(size, _)| size) + .unwrap_or_else(|| place.layout.size); + if size == Size::ZERO { + // Nothing to do for ZSTs. + return Ok(*val); + } + + // Prepare to re-borrow this place. 
+ let ptr = place.ptr.to_ptr()?; + let time = self.machine.stacked_borrows.increment_clock(); + let new_bor = match mutbl { + hir::MutMutable => Borrow::Uniq(time), + hir::MutImmutable => Borrow::Shr(Some(time)), + }; + trace!("retag: Creating new reference ({:?}) for {:?} (pointee {}): {:?}", + mutbl, ptr, place.layout.ty, new_bor); + + // Update the stacks. First create a new borrow, then maybe freeze stuff. + self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr + let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); + alloc.extra.use_and_maybe_re_borrow(ptr, size, Some(mutbl).into(), Some(new_bor))?; + // Maybe freeze stuff + if let Borrow::Shr(Some(bor_t)) = new_bor { + self.visit_frozen(place, size, |frz_ptr, size| { + debug_assert_eq!(frz_ptr.alloc_id, ptr.alloc_id); + // Be frozen! + alloc.extra.freeze(frz_ptr, size, bor_t) + })?; + } + + // Compute the new value and return that + let new_ptr = Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor)); + let new_place = MemPlace { ptr: new_ptr, ..*place }; + Ok(new_place.to_ref()) + } + fn retag( &mut self, _fn_entry: bool, @@ -558,20 +575,30 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { ) -> EvalResult<'tcx> { // For now, we only retag if the toplevel type is a reference. // TODO: Recurse into structs and enums, sharing code with validation. + // TODO: Honor `fn_entry`. let mutbl = match place.layout.ty.sty { ty::Ref(_, _, mutbl) => mutbl, // go ahead - _ => return Ok(()), // don't do a thing + _ => return Ok(()), // do nothing, for now }; - // We want to reborrow the reference stored there. This will call the hooks - // above. First deref, which will call `tag_dereference`. - // (This is somewhat redundant because validation already did the same thing, - // but what can you do.) + // Retag the pointer and write it back. 
let val = self.read_immediate(self.place_to_op(place)?)?; - let dest = self.ref_to_mplace(val)?; - // Now put a new ref into the old place, which will call `tag_reference`. - // FIXME: Honor `fn_entry`! - let val = self.create_ref(dest, Some(mutbl))?; + let val = self.retag_ptr(val, mutbl)?; self.write_immediate(val, place)?; Ok(()) } + + fn escape_to_raw( + &mut self, + place: MPlaceTy<'tcx, Borrow>, + size: Size, + ) -> EvalResult<'tcx> { + trace!("self: {:?} is now accessible by raw pointers", *place); + // Re-borrow to raw. This is a NOP for shared borrows, but we do not know the borrow + // type here and that's also okay. + let ptr = place.ptr.to_ptr()?; + self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr + let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); + alloc.extra.use_and_maybe_re_borrow(ptr, size, UsageKind::Raw, Some(Borrow::default()))?; + Ok(()) + } } diff --git a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs b/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs index 4857ada7fb..2f3d0793f6 100644 --- a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs +++ b/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs @@ -1,4 +1,4 @@ -#![allow(unused_variables)] +// error-pattern: mutable reference with frozen tag mod safe { use std::slice::from_raw_parts_mut; @@ -12,8 +12,10 @@ mod safe { fn main() { let v = vec![0,1,2]; - let v1 = safe::as_mut_slice(&v); + let _v1 = safe::as_mut_slice(&v); +/* let v2 = safe::as_mut_slice(&v); - v1[1] = 5; //~ ERROR does not exist on the stack + v1[1] = 5; v1[1] = 6; +*/ } diff --git a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs b/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs index a6daa5d93d..711544f801 100644 --- a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs +++ b/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs @@ -11,7 +11,6 @@ mod safe { assert!(mid 
<= len); (from_raw_parts_mut(ptr, len - mid), // BUG: should be "mid" instead of "len - mid" - //~^ ERROR does not exist on the stack from_raw_parts_mut(ptr.offset(mid as isize), len - mid)) } } @@ -20,6 +19,7 @@ mod safe { fn main() { let mut array = [1,2,3,4]; let (a, b) = safe::split_at_mut(&mut array, 0); + //~^ ERROR does not exist on the stack a[1] = 5; b[1] = 6; } diff --git a/tests/compile-fail/stacked_borrows/static_memory_modification.rs b/tests/compile-fail/stacked_borrows/static_memory_modification.rs index c092cbfe50..5c605eff67 100644 --- a/tests/compile-fail/stacked_borrows/static_memory_modification.rs +++ b/tests/compile-fail/stacked_borrows/static_memory_modification.rs @@ -1,3 +1,6 @@ +// FIXME still considering whether we are okay with this not being an error +// ignore-test + static X: usize = 5; #[allow(mutable_transmutes)] diff --git a/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs b/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs new file mode 100644 index 0000000000..1ab005e3fa --- /dev/null +++ b/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs @@ -0,0 +1,12 @@ +// Make sure we cannot use raw ptrs that got transmuted from mutable references +// (i.e, no EscapeToRaw happened). +// We could, in principle, to EscapeToRaw lazily to allow this code, but that +// would no alleviate the need for EscapeToRaw (see `ref_raw_int_raw` in +// `run-pass/stacked-borrows.rs`), and thus increase overall complexity. 
+use std::mem; + +fn main() { + let mut x: i32 = 42; + let raw: *mut i32 = unsafe { mem::transmute(&mut x) }; + unsafe { *raw = 13; } //~ ERROR does not exist on the stack +} diff --git a/tests/compile-fail/stacked_borrows/unescaped_local.rs b/tests/compile-fail/stacked_borrows/unescaped_local.rs index 8627cc44c2..054697b04a 100644 --- a/tests/compile-fail/stacked_borrows/unescaped_local.rs +++ b/tests/compile-fail/stacked_borrows/unescaped_local.rs @@ -1,10 +1,8 @@ -use std::mem; - // Make sure we cannot use raw ptrs to access a local that -// has never been escaped to the raw world. +// we took the direct address of. fn main() { let mut x = 42; - let ptr = &mut x; - let raw: *mut i32 = unsafe { mem::transmute(ptr) }; + let raw = &mut x as *mut i32 as usize as *mut i32; + let _ptr = &mut x; unsafe { *raw = 13; } //~ ERROR does not exist on the stack } diff --git a/tests/run-pass/stacked-borrows.rs b/tests/run-pass/stacked-borrows.rs index 8faa4d6591..adab2b5a56 100644 --- a/tests/run-pass/stacked-borrows.rs +++ b/tests/run-pass/stacked-borrows.rs @@ -3,15 +3,17 @@ fn main() { deref_partially_dangling_raw(); read_does_not_invalidate1(); read_does_not_invalidate2(); + ref_raw_int_raw(); } // Deref a raw ptr to access a field of a large struct, where the field // is allocated but not the entire struct is. // For now, we want to allow this. fn deref_partially_dangling_raw() { - let x = (1, 1); + let x = (1, 13); let xptr = &x as *const _ as *const (i32, i32, i32); - let _val = unsafe { (*xptr).1 }; + let val = unsafe { (*xptr).1 }; + assert_eq!(val, 13); } // Make sure that reading from an `&mut` does, like reborrowing to `&`, @@ -23,7 +25,7 @@ fn read_does_not_invalidate1() { let _val = x.1; // we just read, this does NOT invalidate the reborrows. ret } - foo(&mut (1, 2)); + assert_eq!(*foo(&mut (1, 2)), 2); } // Same as above, but this time we first create a raw, then read from `&mut` // and then freeze from the raw. 
@@ -34,5 +36,15 @@ fn read_does_not_invalidate2() { let ret = unsafe { &(*xraw).1 }; ret } - foo(&mut (1, 2)); + assert_eq!(*foo(&mut (1, 2)), 2); +} + +// Just to make sure that casting a ref to raw, to int and back to raw +// and only then using it works. This rules out ideas like "do escape-to-raw lazily": +// After casting to int and back, we lost the tag that could have let us do that. +fn ref_raw_int_raw() { + let mut x = 3; + let xref = &mut x; + let xraw = xref as *mut i32 as usize as *mut i32; + assert_eq!(unsafe { *xraw }, 3); } From 020313dd8506d3704a71d2ec1499f0e7ee93f3dd Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 7 Nov 2018 16:56:25 +0100 Subject: [PATCH 02/15] make freezing inherently part of the high-level reactivate/initiate operations --- src/helpers.rs | 35 +++++--- src/stacked_borrows.rs | 199 +++++++++++++++++++++-------------------- 2 files changed, 125 insertions(+), 109 deletions(-) diff --git a/src/helpers.rs b/src/helpers.rs index 880b18c7d5..31e2972957 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -33,13 +33,13 @@ impl ScalarExt for ScalarMaybeUndef { pub trait EvalContextExt<'tcx> { fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>; - /// Visit the memory covered by `place` that is frozen -- i.e., NOT - /// what is inside an `UnsafeCell`. - fn visit_frozen( + /// Visit the memory covered by `place`, sensitive to freezing: The 3rd parameter + /// will be true if this is frozen, false if this is in an `UnsafeCell`. + fn visit_freeze_sensitive( &self, place: MPlaceTy<'tcx, Borrow>, size: Size, - action: impl FnMut(Pointer, Size) -> EvalResult<'tcx>, + action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, ) -> EvalResult<'tcx>; } @@ -79,13 +79,11 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super: }) } - /// Visit the memory covered by `place` that is frozen -- i.e., NOT - /// what is inside an `UnsafeCell`. 
- fn visit_frozen( + fn visit_freeze_sensitive( &self, place: MPlaceTy<'tcx, Borrow>, size: Size, - mut frozen_action: impl FnMut(Pointer, Size) -> EvalResult<'tcx>, + mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { trace!("visit_frozen(place={:?}, size={:?})", *place, size); debug_assert_eq!(size, @@ -99,18 +97,29 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super: let mut end_ptr = place.ptr; // Called when we detected an `UnsafeCell` at the given offset and size. // Calls `action` and advances `end_ptr`. - let mut unsafe_cell_action = |unsafe_cell_offset, unsafe_cell_size| { + let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { + if unsafe_cell_size != Size::ZERO { + debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().alloc_id, + end_ptr.to_ptr().unwrap().alloc_id); + debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().tag, + end_ptr.to_ptr().unwrap().tag); + } // We assume that we are given the fields in increasing offset order, // and nothing else changes. + let unsafe_cell_offset = unsafe_cell_ptr.get_ptr_offset(self); let end_offset = end_ptr.get_ptr_offset(self); assert!(unsafe_cell_offset >= end_offset); let frozen_size = unsafe_cell_offset - end_offset; // Everything between the end_ptr and this `UnsafeCell` is frozen. if frozen_size != Size::ZERO { - frozen_action(end_ptr.to_ptr()?, frozen_size)?; + action(end_ptr.to_ptr()?, frozen_size, /*frozen*/true)?; + } + // This `UnsafeCell` is NOT frozen. + if unsafe_cell_size != Size::ZERO { + action(unsafe_cell_ptr.to_ptr()?, unsafe_cell_size, /*frozen*/false)?; } // Update end end_ptr. 
- end_ptr = end_ptr.ptr_wrapping_offset(frozen_size+unsafe_cell_size, self); + end_ptr = unsafe_cell_ptr.ptr_wrapping_offset(unsafe_cell_size, self); // Done Ok(()) }; @@ -126,7 +135,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super: .unwrap_or_else(|| place.layout.size_and_align()); // Now handle this `UnsafeCell`, unless it is empty. if unsafe_cell_size != Size::ZERO { - unsafe_cell_action(place.ptr.get_ptr_offset(self), unsafe_cell_size) + unsafe_cell_action(place.ptr, unsafe_cell_size) } else { Ok(()) } @@ -136,7 +145,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super: } // The part between the end_ptr and the end of the place is also frozen. // So pretend there is a 0-sized `UnsafeCell` at the end. - unsafe_cell_action(place.ptr.get_ptr_offset(self) + size, Size::ZERO)?; + unsafe_cell_action(place.ptr.ptr_wrapping_offset(size, self), Size::ZERO)?; // Done! return Ok(()); diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index e7c595f1c6..ab0c3f8994 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -217,10 +217,12 @@ impl<'tcx> Stack { /// Initiate `bor`; mostly this means pushing. /// This operation cannot fail; it is up to the caller to ensure that the precondition - /// is met: We cannot push onto frozen stacks. + /// is met: We cannot push `Uniq` onto frozen stacks. + /// Crucially, this makes pushing a `Shr` onto a frozen location a NOP. We do not want + /// such a location to get mutably shared this way! fn initiate(&mut self, bor: Borrow) { if let Some(_) = self.frozen_since { - // "Pushing" a Shr or Frz on top is redundant. + // A frozen location, we won't change anything here! 
match bor { Borrow::Uniq(_) => bug!("Trying to create unique ref to frozen location"), Borrow::Shr(_) => trace!("initiate: New shared ref to frozen location is a NOP"), @@ -272,39 +274,41 @@ impl State { /// Higher-level operations impl<'tcx> Stacks { - /// The single most important operation: Make sure that using `ptr` is okay, - /// and if `new_bor` is present then make that the new current borrow. - fn use_and_maybe_re_borrow( + /// `ptr` got used, reflect that in the stack. + fn reactivate( &self, ptr: Pointer, size: Size, usage: UsageKind, - new_bor: Option, ) -> EvalResult<'tcx> { - trace!("use_and_maybe_re_borrow of tag {:?} as {:?}, new {:?}: {:?}, size {}", - ptr.tag, usage, new_bor, ptr, size.bytes()); + trace!("use_borrow of tag {:?} as {:?}: {:?}, size {}", + ptr.tag, usage, ptr, size.bytes()); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { stack.reactivate(ptr.tag, usage == UsageKind::Write)?; - if let Some(new_bor) = new_bor { - stack.initiate(new_bor); - } } Ok(()) } - /// Freeze the given memory range. - fn freeze( + /// Create a new borrow, the ptr must already have the new tag. + /// Also freezes the location if `freeze` is set and the tag is a timestamped `Shr`. + fn initiate( &self, ptr: Pointer, size: Size, - bor_t: Timestamp - ) -> EvalResult<'tcx> { + freeze: bool, + ) { + trace!("reborrow for tag {:?}: {:?}, size {}", + ptr.tag, ptr, size.bytes()); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.freeze(bor_t); + stack.initiate(ptr.tag); + if freeze { + if let Borrow::Shr(Some(bor_t)) = ptr.tag { + stack.freeze(bor_t); + } + } } - Ok(()) } /// Check that this stack is fine with being dereferenced @@ -312,6 +316,7 @@ impl<'tcx> Stacks { &self, ptr: Pointer, size: Size, + frozen: bool, ) -> EvalResult<'tcx> { let mut stacks = self.stacks.borrow_mut(); // We need `iter_mut` because `iter` would skip gaps! 
@@ -323,20 +328,14 @@ impl<'tcx> Stacks { err ))) } - } - Ok(()) - } - - /// Check that this stack is appropriately frozen - fn check_frozen( - &self, - ptr: Pointer, - size: Size, - bor_t: Timestamp - ) -> EvalResult<'tcx> { - let mut stacks = self.stacks.borrow_mut(); - for stack in stacks.iter_mut(ptr.offset, size) { - stack.check_frozen(bor_t)?; + // Sometimes we also need to be frozen. + if frozen { + // Even shared refs can have uniq tags (after transmute). That's not an error + // but they do not get any freezing benefits. + if let Borrow::Shr(Some(bor_t)) = ptr.tag { + stack.check_frozen(bor_t)?; + } + } } Ok(()) } @@ -351,7 +350,7 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { // Reads behave exactly like the first half of a reborrow-to-shr - alloc.extra.use_and_maybe_re_borrow(ptr, size, UsageKind::Read, None) + alloc.extra.reactivate(ptr, size, UsageKind::Read) } #[inline(always)] @@ -361,7 +360,7 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { // Writes behave exactly like the first half of a reborrow-to-mut - alloc.extra.use_and_maybe_re_borrow(ptr, size, UsageKind::Write, None) + alloc.extra.reactivate(ptr, size, UsageKind::Read) } #[inline(always)] @@ -371,7 +370,7 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { // This is like mutating - alloc.extra.use_and_maybe_re_borrow(ptr, size, UsageKind::Write, None) + alloc.extra.reactivate(ptr, size, UsageKind::Write) // FIXME: Error out of there are any barriers? } } @@ -429,6 +428,35 @@ pub trait EvalContextExt<'tcx> { } impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { + fn tag_new_allocation( + &mut self, + id: AllocId, + kind: MemoryKind, + ) -> Borrow { + let time = match kind { + MemoryKind::Stack => { + // New unique borrow. This `Uniq` is not accessible by the program, + // so it will only ever be used when using the local directly (i.e., + // not through a pointer). 
IOW, whenever we directly use a local this will pop + // everything else off the stack, invalidating all previous pointers + // and, in particular, *all* raw pointers. This subsumes the explicit + // `reset` which the blog post [1] says to perform when accessing a local. + // + // [1] https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html + self.machine.stacked_borrows.increment_clock() + } + _ => { + // Nothing to do for everything else + return Borrow::default() + } + }; + // Make this the active borrow for this allocation + let alloc = self.memory_mut().get_mut(id).expect("This is a new allocation, it must still exist"); + let size = Size::from_bytes(alloc.bytes.len() as u64); + alloc.extra.first_item(BorStackItem::Uniq(time), size); + Borrow::Uniq(time) + } + /// Called for value-to-place conversion. /// /// Note that this does NOT mean that all this memory will actually get accessed/referenced! @@ -449,8 +477,8 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { // That can transmute a raw ptr to a (shared/mut) ref, and a mut ref to a shared one. match (usage, ptr.tag) { (UsageKind::Raw, _) => { - // Don't use the tag, this is a raw access! Even if there is a tag, - // that means transmute happened and we ignore the tag. + // Don't use the tag, this is a raw access! They should happen tagless. + // This does mean, however, that `&*foo` is *not* a NOP *if* `foo` is a raw ptr. // Also don't do any further validation, this is raw after all. return Ok(Borrow::default()); } @@ -478,50 +506,41 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { } } - // If we got here, we do some checking, *but* we leave the tag unchanged. 
+ // Get the allocation self.memory().check_bounds(ptr, size, false)?; let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); - alloc.extra.check_deref(ptr, size)?; - // Maybe check frozen stuff - if let Borrow::Shr(Some(bor_t)) = ptr.tag { - self.visit_frozen(place, size, |frz_ptr, size| { - debug_assert_eq!(frz_ptr.alloc_id, ptr.alloc_id); - // Are you frozen? - alloc.extra.check_frozen(frz_ptr, size, bor_t) + // If we got here, we do some checking, *but* we leave the tag unchanged. + if let Borrow::Shr(Some(_)) = ptr.tag { + // We need a frozen-sensitive check + self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { + alloc.extra.check_deref(cur_ptr, size, frozen) })?; + } else { + // Just treat this as one big chunk + alloc.extra.check_deref(ptr, size, /*frozen*/false)?; } // All is good, and do not change the tag Ok(ptr.tag) } - fn tag_new_allocation( + /// The given place may henceforth be accessed through raw pointers. + fn escape_to_raw( &mut self, - id: AllocId, - kind: MemoryKind, - ) -> Borrow { - let time = match kind { - MemoryKind::Stack => { - // New unique borrow. This `Uniq` is not accessible by the program, - // so it will only ever be used when using the local directly (i.e., - // not through a pointer). IOW, whenever we directly use a local this will pop - // everything else off the stack, invalidating all previous pointers - // and, in particular, *all* raw pointers. This subsumes the explicit - // `reset` which the blog post [1] says to perform when accessing a local. 
- // - // [1] https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html - self.machine.stacked_borrows.increment_clock() - } - _ => { - // Nothing to do for everything else - return Borrow::default() - } - }; - // Make this the active borrow for this allocation - let alloc = self.memory_mut().get_mut(id).expect("This is a new allocation, it must still exist"); - let size = Size::from_bytes(alloc.bytes.len() as u64); - alloc.extra.first_item(BorStackItem::Uniq(time), size); - Borrow::Uniq(time) + place: MPlaceTy<'tcx, Borrow>, + size: Size, + ) -> EvalResult<'tcx> { + trace!("self: {:?} is now accessible by raw pointers", *place); + // Get the allocation + let mut ptr = place.ptr.to_ptr()?; + self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr + let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); + // Re-borrow to raw. This is a NOP for shared borrows, but we do not know the borrow + // type here and that's also okay. Freezing does not matter here. + alloc.extra.reactivate(ptr, size, UsageKind::Raw)?; + ptr.tag = Borrow::default(); + alloc.extra.initiate(ptr, size, /*freeze*/false); + Ok(()) } fn retag_ptr( @@ -546,25 +565,28 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { hir::MutMutable => Borrow::Uniq(time), hir::MutImmutable => Borrow::Shr(Some(time)), }; + let new_ptr = Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor); trace!("retag: Creating new reference ({:?}) for {:?} (pointee {}): {:?}", mutbl, ptr, place.layout.ty, new_bor); - // Update the stacks. First create a new borrow, then maybe freeze stuff. 
+ // Get the allocation self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); - alloc.extra.use_and_maybe_re_borrow(ptr, size, Some(mutbl).into(), Some(new_bor))?; - // Maybe freeze stuff - if let Borrow::Shr(Some(bor_t)) = new_bor { - self.visit_frozen(place, size, |frz_ptr, size| { - debug_assert_eq!(frz_ptr.alloc_id, ptr.alloc_id); - // Be frozen! - alloc.extra.freeze(frz_ptr, size, bor_t) + // Update the stacks. First use old borrow, then initiate new one. + alloc.extra.reactivate(ptr, size, Some(mutbl).into())?; + if mutbl == hir::MutImmutable { + // We need a frozen-sensitive initiate + self.visit_freeze_sensitive(place, size, |mut cur_ptr, size, frozen| { + cur_ptr.tag = new_bor; + Ok(alloc.extra.initiate(cur_ptr, size, frozen)) })?; + } else { + // Just treat this as one big chunk + alloc.extra.initiate(new_ptr, size, /*frozen*/false); } - // Compute the new value and return that - let new_ptr = Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor)); - let new_place = MemPlace { ptr: new_ptr, ..*place }; + // Return new ptr + let new_place = MemPlace { ptr: Scalar::Ptr(new_ptr), ..*place }; Ok(new_place.to_ref()) } @@ -586,19 +608,4 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { self.write_immediate(val, place)?; Ok(()) } - - fn escape_to_raw( - &mut self, - place: MPlaceTy<'tcx, Borrow>, - size: Size, - ) -> EvalResult<'tcx> { - trace!("self: {:?} is now accessible by raw pointers", *place); - // Re-borrow to raw. This is a NOP for shared borrows, but we do not know the borrow - // type here and that's also okay. 
- let ptr = place.ptr.to_ptr()?;
- self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr
- let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!");
- alloc.extra.use_and_maybe_re_borrow(ptr, size, UsageKind::Raw, Some(Borrow::default()))?;
- Ok(())
- }
}
From 94e751267c881bc26219853733222e4fe8fdc87a Mon Sep 17 00:00:00 2001
From: Ralf Jung
Date: Wed, 7 Nov 2018 17:33:41 +0100
Subject: [PATCH 03/15] add another mean test case

---
 .../stacked_borrows/illegal_read3.rs | 29 +++++++++++++++++++
 1 file changed, 29 insertions(+)
 create mode 100644 tests/compile-fail/stacked_borrows/illegal_read3.rs

diff --git a/tests/compile-fail/stacked_borrows/illegal_read3.rs b/tests/compile-fail/stacked_borrows/illegal_read3.rs
new file mode 100644
index 0000000000..b0da0511de
--- /dev/null
+++ b/tests/compile-fail/stacked_borrows/illegal_read3.rs
@@ -0,0 +1,29 @@
+#![feature(untagged_unions)]
+// A callee may not read the destination of our `&mut` without
+// us noticing.
+// This code got carefully checked to not introduce any reborrows
+// that are not explicit in the source. Let's hope the compiler does not break this later!
+
+use std::mem;
+
+fn main() {
+ let mut x: i32 = 15;
+ let xref1 = &mut x;
+ let xref1_sneaky: usize = unsafe { mem::transmute_copy(&xref1) };
+ let xref2 = &mut *xref1; // derived from xref1, so using raw is still okay...
+ callee(xref1_sneaky);
+ let _val = *xref2; // ...but any use of it will invalidate our ref.
+ //~^ ERROR: does not exist on the stack
+}
+
+fn callee(xref1: usize) {
+ // Transmuting through a union to avoid retagging
+ union UsizeToRef {
+ from: usize,
+ to: &'static mut i32,
+ }
+ let xref1 = UsizeToRef { from: xref1 };
+ // Doing the deref and the transmute (through the union) in the same place expression
+ // should avoid retagging.
+ let _val = unsafe { *xref1.to }; +} From aa8f523df6447a32c15d2620a52a55761f94da97 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 7 Nov 2018 21:08:34 +0100 Subject: [PATCH 04/15] test for special things that are now possible --- tests/run-pass/stacked-borrows.rs | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/run-pass/stacked-borrows.rs b/tests/run-pass/stacked-borrows.rs index adab2b5a56..00b21c746c 100644 --- a/tests/run-pass/stacked-borrows.rs +++ b/tests/run-pass/stacked-borrows.rs @@ -4,6 +4,8 @@ fn main() { read_does_not_invalidate1(); read_does_not_invalidate2(); ref_raw_int_raw(); + mut_shr_raw(); + mut_raw_then_mut_shr(); } // Deref a raw ptr to access a field of a large struct, where the field @@ -48,3 +50,29 @@ fn ref_raw_int_raw() { let xraw = xref as *mut i32 as usize as *mut i32; assert_eq!(unsafe { *xraw }, 3); } + +// Creating a raw from a `&mut` through an `&` works, even if we +// write through that raw. +fn mut_shr_raw() { + let mut x = 2; + { + let xref = &mut x; + let xraw = &*xref as *const i32 as *mut i32; + unsafe { *xraw = 4; } + } + assert_eq!(x, 4); +} + +// Escape a mut to raw, then share the same mut and use the share, then the raw. +// That should work. 
+fn mut_raw_then_mut_shr() { + let mut x = 2; + { + let xref = &mut x; + let xraw = &mut *xref as *mut _; + let xshr = &*xref; + assert_eq!(*xshr, 2); + unsafe { *xraw = 4; } + } + assert_eq!(x, 4); +} From a94e197105a0ce67cba816299cdd59efdb6df7a4 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 7 Nov 2018 21:08:20 +0100 Subject: [PATCH 05/15] better test the special exception for reading through unique when things are shared --- src/stacked_borrows.rs | 14 +++++++---- .../stacked_borrows/illegal_read4.rs | 9 ++++++++ tests/run-pass/stacked-borrows.rs | 23 +++++++++++++++++++ 3 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 tests/compile-fail/stacked_borrows/illegal_read4.rs diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index ab0c3f8994..56688ca10f 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -167,13 +167,15 @@ impl<'tcx> Stack { behind a barrier", bor)) } (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. + // Found matching unique item. This is *always* required to use a `Uniq`: + // The item must still be on the stack. if !is_write { - // As a special case, if we are reading and since we *did* find the `Uniq`, - // we try to pop less: We are happy with making a `Shr` or `Frz` active; - // that one will not mind concurrent reads. + // As a special case, if we are reading, let us see if it would be + // beneficial to pretend we are a raw pointer instead. If + // raw pointers are allowed to read while popping *less* than we + // would have to pop, there is no reason not to let them do this. match self.reactivatable(Borrow::default(), is_write) { - // If we got something better that `idx`, use that + // If we got something better (popping less) that `idx`, use that Ok(None) => return Ok(None), Ok(Some(shr_idx)) if shr_idx <= idx => return Ok(Some(shr_idx)), // Otherwise just go on. 
@@ -329,6 +331,8 @@ impl<'tcx> Stacks { ))) } // Sometimes we also need to be frozen. + // In this case we *both* push `Shr` and then freeze. This means that a `&mut` + // to `*const` to `*mut` cast through `&` actually works. if frozen { // Even shared refs can have uniq tags (after transmute). That's not an error // but they do not get any freezing benefits. diff --git a/tests/compile-fail/stacked_borrows/illegal_read4.rs b/tests/compile-fail/stacked_borrows/illegal_read4.rs new file mode 100644 index 0000000000..c86ec1286d --- /dev/null +++ b/tests/compile-fail/stacked_borrows/illegal_read4.rs @@ -0,0 +1,9 @@ +// Using a raw invalidates derived `&mut` even for reading. +fn main() { + let mut x = 2; + let xref1 = &mut x; + let xraw = xref1 as *mut _; + let xref2 = unsafe { &mut *xraw }; + let _val = unsafe { *xraw }; // use the raw again, this invalidates xref2 *even* with the special read except for uniq refs + let _illegal = *xref2; //~ ERROR does not exist on the stack +} diff --git a/tests/run-pass/stacked-borrows.rs b/tests/run-pass/stacked-borrows.rs index 00b21c746c..7b7a7c9be2 100644 --- a/tests/run-pass/stacked-borrows.rs +++ b/tests/run-pass/stacked-borrows.rs @@ -6,6 +6,7 @@ fn main() { ref_raw_int_raw(); mut_shr_raw(); mut_raw_then_mut_shr(); + mut_raw_mut(); } // Deref a raw ptr to access a field of a large struct, where the field @@ -76,3 +77,25 @@ fn mut_raw_then_mut_shr() { } assert_eq!(x, 4); } + +// Ensure that if we derive from a mut a raw, and then from that a mut, +// and then read through the original mut, that does not invalidate the raw. +// This shows that the read-exception for `&mut` applies even if the `Shr` item +// on the stack is not at the top. 
+fn mut_raw_mut() { + let mut x = 2; + { + let xref1 = &mut x; + let xraw = xref1 as *mut _; + let _xref2 = unsafe { &mut *xraw }; + let _val = *xref1; + unsafe { *xraw = 4; } + // we can now use both xraw and xref1, for reading + assert_eq!(*xref1, 4); + assert_eq!(unsafe { *xraw }, 4); + assert_eq!(*xref1, 4); + assert_eq!(unsafe { *xraw }, 4); + // we cannot use xref2; see `compile-fail/stacked-borows/illegal_read4.rs` + } + assert_eq!(x, 4); +} From 224d03dbdc642a60a3326c7bb9b9206082d4cda4 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 8 Nov 2018 08:58:03 +0100 Subject: [PATCH 06/15] organize std tests a bit better --- tests/run-pass/rc.rs | 35 +++++++++++++++++++++++++++++++++-- tests/run-pass/refcell.rs | 6 +++++- tests/run-pass/std.rs | 34 ---------------------------------- 3 files changed, 38 insertions(+), 37 deletions(-) delete mode 100644 tests/run-pass/std.rs diff --git a/tests/run-pass/rc.rs b/tests/run-pass/rc.rs index 0bf7075031..bc89d752e0 100644 --- a/tests/run-pass/rc.rs +++ b/tests/run-pass/rc.rs @@ -1,13 +1,33 @@ -use std::cell::RefCell; +use std::cell::{Cell, RefCell}; use std::rc::Rc; +use std::sync::Arc; fn rc_refcell() { let r = Rc::new(RefCell::new(42)); + let r2 = r.clone(); *r.borrow_mut() += 10; - let x = *r.borrow(); + let x = *r2.borrow(); assert_eq!(x, 52); } +fn rc_cell() { + let r = Rc::new(Cell::new(42)); + let r2 = r.clone(); + let x = r.get(); + r2.set(x + x); + assert_eq!(r.get(), 84); +} + +fn rc_refcell2() { + let r = Rc::new(RefCell::new(42)); + let r2 = r.clone(); + *r.borrow_mut() += 10; + let x = r2.borrow(); + let r3 = r.clone(); + let y = r3.borrow(); + assert_eq!((*x + *y)/2, 52); +} + fn rc_raw() { let r = Rc::new(0); let r2 = Rc::into_raw(r.clone()); @@ -17,6 +37,14 @@ fn rc_raw() { assert!(Rc::try_unwrap(r2).is_ok()); } +fn arc() { + fn test() -> Arc { + let a = Arc::new(42); + a + } + assert_eq!(*test(), 42); +} + // Make sure this Rc doesn't fall apart when touched fn check_unique_rc(mut r: Rc) { let r2 = 
r.clone(); @@ -34,6 +62,9 @@ fn rc_from() { fn main() { rc_refcell(); + rc_refcell2(); + rc_cell(); rc_raw(); rc_from(); + arc(); } diff --git a/tests/run-pass/refcell.rs b/tests/run-pass/refcell.rs index 93cef1572a..52dcdbd36d 100644 --- a/tests/run-pass/refcell.rs +++ b/tests/run-pass/refcell.rs @@ -1,6 +1,6 @@ use std::cell::RefCell; -fn main() { +fn lots_of_funny_borrows() { let c = RefCell::new(42); { let s1 = c.borrow(); @@ -31,3 +31,7 @@ fn main() { let _y: i32 = *s2; } } + +fn main() { + lots_of_funny_borrows(); +} diff --git a/tests/run-pass/std.rs b/tests/run-pass/std.rs deleted file mode 100644 index 7ff967b29f..0000000000 --- a/tests/run-pass/std.rs +++ /dev/null @@ -1,34 +0,0 @@ -use std::cell::{Cell, RefCell}; -use std::rc::Rc; -use std::sync::Arc; - -fn rc_cell() -> Rc> { - let r = Rc::new(Cell::new(42)); - let x = r.get(); - r.set(x + x); - r -} - -fn rc_refcell() -> i32 { - let r = Rc::new(RefCell::new(42)); - *r.borrow_mut() += 10; - let x = r.borrow(); - let y = r.borrow(); - (*x + *y)/2 -} - -fn arc() -> Arc { - let a = Arc::new(42); - a -} - -fn true_assert() { - assert_eq!(1, 1); -} - -fn main() { - assert_eq!(*arc(), 42); - assert_eq!(rc_cell().get(), 84); - assert_eq!(rc_refcell(), 52); - true_assert(); -} From a87e9521029fdc319b5c86680d6645f411751943 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 9 Nov 2018 10:53:28 +0100 Subject: [PATCH 07/15] Separate deref and access into different operations; add special exception for creating raw references --- src/lib.rs | 14 +- src/stacked_borrows.rs | 423 +++++++++--------- .../stacked_borrows/alias_through_mutation.rs | 2 +- .../stacked_borrows/illegal_read5.rs | 16 + .../stacked_borrows/illegal_write1.rs | 2 +- .../stacked_borrows/illegal_write3.rs | 2 +- .../stacked_borrows/illegal_write4.rs | 2 +- .../stacked_borrows/illegal_write5.rs | 2 +- .../stacked_borrows/load_invalid_mut.rs | 2 +- .../stacked_borrows/load_invalid_shr.rs | 4 +- .../stacked_borrows/pass_invalid_mut.rs | 2 +- 
.../stacked_borrows/pass_invalid_shr.rs | 6 +- .../stacked_borrows/return_invalid_mut.rs | 2 +- .../stacked_borrows/return_invalid_shr.rs | 4 +- tests/compiletest.rs | 4 +- tests/run-pass/refcell.rs | 17 + 16 files changed, 280 insertions(+), 224 deletions(-) create mode 100644 tests/compile-fail/stacked_borrows/illegal_read5.rs diff --git a/src/lib.rs b/src/lib.rs index e8691409b1..a0ed7c8e4f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -308,16 +308,18 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { // Some functions are whitelisted until we figure out how to fix them. // We walk up the stack a few frames to also cover their callees. - const WHITELIST: &[&str] = &[ + const WHITELIST: &[(&str, &str)] = &[ // Uses mem::uninitialized - "std::ptr::read", - "std::sys::windows::mutex::Mutex::", + ("std::ptr::read", ""), + ("std::sys::windows::mutex::Mutex::", ""), + // Should directly take a raw reference + (">", "::get"), ]; for frame in ecx.stack().iter() .rev().take(3) { let name = frame.instance.to_string(); - if WHITELIST.iter().any(|white| name.starts_with(white)) { + if WHITELIST.iter().any(|(prefix, suffix)| name.starts_with(prefix) && name.ends_with(suffix)) { return false; } } @@ -453,7 +455,9 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { let (size, _) = ecx.size_and_align_of_mplace(place)? 
// for extern types, just cover what we can .unwrap_or_else(|| place.layout.size_and_align()); - if !ecx.machine.validate || size == Size::ZERO { + if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || + !Self::enforce_validity(ecx) || size == Size::ZERO + { // No tracking Ok(place.ptr) } else { diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 56688ca10f..ab536b5785 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -1,10 +1,10 @@ use std::cell::RefCell; use rustc::ty::{self, layout::Size}; -use rustc::hir; +use rustc::hir::{Mutability, MutMutable, MutImmutable}; use crate::{ - EvalResult, MiriEvalContext, HelpersEvalContextExt, + EvalResult, EvalErrorKind, MiriEvalContext, HelpersEvalContextExt, MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra, Pointer, MemPlace, Scalar, Immediate, ImmTy, PlaceTy, MPlaceTy, }; @@ -27,7 +27,7 @@ pub enum Borrow { impl Borrow { #[inline(always)] - pub fn is_shr(self) -> bool { + pub fn is_shared(self) -> bool { match self { Borrow::Shr(_) => true, _ => false, @@ -35,7 +35,7 @@ impl Borrow { } #[inline(always)] - pub fn is_uniq(self) -> bool { + pub fn is_unique(self) -> bool { match self { Borrow::Uniq(_) => true, _ => false, @@ -96,27 +96,17 @@ impl Stack { } } -/// What kind of usage of the pointer are we talking about? +/// What kind of reference is being used? 
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum UsageKind { - /// Write, or create &mut - Write, - /// Read, or create & - Read, - /// Create * (raw ptr) +pub enum RefKind { + /// &mut + Unique, + /// & without interior mutability + Frozen, + /// * (raw pointer) or & to `UnsafeCell` Raw, } -impl From> for UsageKind { - fn from(mutbl: Option) -> Self { - match mutbl { - None => UsageKind::Raw, - Some(hir::MutMutable) => UsageKind::Write, - Some(hir::MutImmutable) => UsageKind::Read, - } - } -} - /// Extra global machine state #[derive(Clone, Debug)] pub struct State { @@ -127,6 +117,12 @@ impl State { pub fn new() -> State { State { clock: 0 } } + + fn increment_clock(&mut self) -> Timestamp { + let val = self.clock; + self.clock = val + 1; + val + } } /// Extra per-allocation state @@ -136,52 +132,45 @@ pub struct Stacks { stacks: RefCell>, } -/// Core operations +/// Core per-location operations: deref, access, create. +/// We need to make at least the following things true: +/// +/// U1: After creating a Uniq, it is at the top (+unfrozen). +/// U2: If the top is Uniq (+unfrozen), accesses must be through that Uniq or pop it. +/// U3: If an access (deref sufficient?) happens with a Uniq, it requires the Uniq to be in the stack. +/// +/// F1: After creating a &, the parts outside `UnsafeCell` are frozen. +/// F2: If a write access happens, it unfreezes. +/// F3: If an access (well, a deref) happens with an & outside `UnsafeCell`, it requires the location to still be frozen. impl<'tcx> Stack { - /// Check if `bor` could be activated by unfreezing and popping. - /// `is_write` indicates whether this is being used to write (or, equivalently, to - /// borrow as &mut). - /// Returns `Err` if the answer is "no"; otherwise the return value indicates what to - /// do: With `Some(n)` you need to unfreeze, and then additionally pop `n` items. - fn reactivatable(&self, bor: Borrow, is_write: bool) -> Result, String> { - // Check if we can match the frozen "item". 
Not possible on writes!
- if !is_write {
- // For now, we do NOT check the timestamp. That might be surprising, but
- // we cannot even notice when a location should be frozen but is not!
- // Those checks are both done in `tag_dereference`, where we have type information.
- // Either way, it is crucial that the frozen "item" matches raw pointers:
- // Reading through a raw should not unfreeze.
- match (self.frozen_since, bor) {
- (Some(_), Borrow::Shr(_)) => {
- return Ok(None)
+ /// Deref `bor`: Check if the location is frozen and the tag in the stack.
+ /// This does *not* constitute an access! "Deref" refers to the `*` operator
+ /// in Rust, and includes cases like `&*x` or `(*x).foo` where no or only part
+ /// of the memory actually gets accessed. Also we cannot know if we are
+ /// going to read or write.
+ /// Returns the index of the item we matched, `None` if it was the frozen one.
+ /// `kind` indicates which kind of reference is being dereferenced.
+ fn deref(&self, bor: Borrow, kind: RefKind) -> Result, String> {
+ // Checks related to freezing
+ match bor {
+ Borrow::Shr(Some(bor_t)) if kind == RefKind::Frozen => {
+ // We need the location to be frozen. This ensures F3.
+ let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t);
+ return if frozen { Ok(None) } else {
+ Err(format!("Location is not frozen long enough"))
}
- _ => {},
}
+ Borrow::Shr(_) if self.frozen_since.is_some() => {
+ return Ok(None) // Shared deref to frozen location, looking good
+ }
+ _ => {} // Not sufficient, go on looking.
}
- // See if we can find this borrow.
- for (idx, &itm) in self.borrows.iter().rev().enumerate() {
- // Check borrow and stack item for compatibility.
+ // If we got here, we have to look for our item in the stack.
+ for (idx, &itm) in self.borrows.iter().enumerate().rev() { match (itm, bor) { - (BorStackItem::FnBarrier(_), _) => { - return Err(format!("Trying to reactivate a borrow ({:?}) that lives \ - behind a barrier", bor)) - } + (BorStackItem::FnBarrier(_), _) => break, (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. This is *always* required to use a `Uniq`: - // The item must still be on the stack. - if !is_write { - // As a special case, if we are reading, let us see if it would be - // beneficial to pretend we are a raw pointer instead. If - // raw pointers are allowed to read while popping *less* than we - // would have to pop, there is no reason not to let them do this. - match self.reactivatable(Borrow::default(), is_write) { - // If we got something better (popping less) that `idx`, use that - Ok(None) => return Ok(None), - Ok(Some(shr_idx)) if shr_idx <= idx => return Ok(Some(shr_idx)), - // Otherwise just go on. - _ => {}, - } - } + // Found matching unique item. This satisfies U3. return Ok(Some(idx)) } (BorStackItem::Shr, Borrow::Shr(_)) => { @@ -192,154 +181,182 @@ impl<'tcx> Stack { _ => {} } } - // Nothing to be found. - Err(format!("Borrow-to-reactivate {:?} does not exist on the stack", bor)) + // If we got here, we did not find our item. We have to error to satisfy U3. + Err(format!( + "Borrow being dereferenced ({:?}) does not exist on the stack, or is guarded by a barrier", + bor + )) } - /// Reactive `bor` for this stack. `is_write` indicates whether this is being - /// used to write (or, equivalently, to borrow as &mut). - fn reactivate(&mut self, bor: Borrow, is_write: bool) -> EvalResult<'tcx> { - let mut pop = match self.reactivatable(bor, is_write) { - Ok(None) => return Ok(()), - Ok(Some(pop)) => pop, - Err(err) => return err!(MachineError(err)), - }; - // Pop what `reactivatable` told us to pop. Always unfreeze. + /// Perform an actual memory access using `bor`. 
We do not know any types here + /// or whether things should be frozen, but we *do* know if this is reading + /// or writing. + fn access(&mut self, bor: Borrow, is_write: bool) -> EvalResult<'tcx> { + // Check if we can match the frozen "item". + // Not possible on writes! if self.is_frozen() { - trace!("reactivate: Unfreezing"); + if !is_write { + // When we are frozen, we just accept all reads. No harm in this. + // The deref already checked that `Uniq` items are in the stack, and that + // the location is frozen if it should be. + return Ok(()); + } + trace!("access: Unfreezing"); } + // Unfreeze on writes. This ensures F2. self.frozen_since = None; - while pop > 0 { - let itm = self.borrows.pop().unwrap(); - trace!("reactivate: Popping {:?}", itm); - pop -= 1; + // Pop the stack until we have something matching. + while let Some(&itm) = self.borrows.last() { + match (itm, bor) { + (BorStackItem::FnBarrier(_), _) => break, + (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { + // Found matching unique item. + return Ok(()) + } + (BorStackItem::Shr, _) if !is_write => { + // When reading, everything can use a shared item! + // We do not want to do this when writing: Writing to an `&mut` + // should reaffirm its exclusivity (i.e., make sure it is + // on top of the stack). + return Ok(()) + } + (BorStackItem::Shr, Borrow::Shr(_)) => { + // Found matching shared item. + return Ok(()) + } + _ => { + // Pop this. This ensures U2. + let itm = self.borrows.pop().unwrap(); + trace!("access: Popping {:?}", itm); + } + } } - Ok(()) + // If we got here, we did not find our item. + err!(MachineError(format!( + "Borrow being accessed ({:?}) does not exist on the stack, or is guarded by a barrier", + bor + ))) } /// Initiate `bor`; mostly this means pushing. /// This operation cannot fail; it is up to the caller to ensure that the precondition /// is met: We cannot push `Uniq` onto frozen stacks. 
- /// Crucially, this makes pushing a `Shr` onto a frozen location a NOP. We do not want - /// such a location to get mutably shared this way! - fn initiate(&mut self, bor: Borrow) { - if let Some(_) = self.frozen_since { + /// `kind` indicates which kind of reference is being created. + fn create(&mut self, bor: Borrow, kind: RefKind) { + // First, push the item. We do this even if we will later freeze, because we + // will allow mutation of shared data at the expense of unfreezing. + if let Some(itm_t) = self.frozen_since { // A frozen location, we won't change anything here! match bor { Borrow::Uniq(_) => bug!("Trying to create unique ref to frozen location"), - Borrow::Shr(_) => trace!("initiate: New shared ref to frozen location is a NOP"), + Borrow::Shr(Some(bor_t)) if kind == RefKind::Frozen => { + // Make sure we are frozen long enough. This is part 1 of ensuring F1. + assert!(itm_t <= bor_t, "Trying to freeze shorter than it was frozen?"); + trace!("create: Freezing a frozen location is a NOP"); + } + Borrow::Shr(_) => trace!("create: Sharing a frozen location is a NOP"), } } else { - // Just push. + // First push. let itm = match bor { Borrow::Uniq(t) => BorStackItem::Uniq(t), - Borrow::Shr(_) if *self.borrows.last().unwrap() == BorStackItem::Shr => { - // Optimization: Don't push a Shr onto a Shr. - trace!("initiate: New shared ref to already shared location is a NOP"); - return - }, Borrow::Shr(_) => BorStackItem::Shr, }; - trace!("initiate: Pushing {:?}", itm); - self.borrows.push(itm) - } - } - - /// Check if this location is "frozen enough". - fn check_frozen(&self, bor_t: Timestamp) -> EvalResult<'tcx> { - let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t); - if !frozen { - err!(MachineError(format!("Location is not frozen long enough"))) - } else { - Ok(()) - } - } - - /// Freeze this location, since `bor_t`. 
- fn freeze(&mut self, bor_t: Timestamp) { - if let Some(itm_t) = self.frozen_since { - assert!(itm_t <= bor_t, "Trying to freeze shorter than it was frozen?"); - } else { - trace!("Freezing"); - self.frozen_since = Some(bor_t); + if *self.borrows.last().unwrap() == itm { + assert!(bor.is_shared()); + trace!("create: Sharing a shared location is a NOP"); + } else { + // This ensures U1. + trace!("create: Pushing {:?}", itm); + self.borrows.push(itm); + } + // Now, maybe freeze. This is part 2 of ensuring F1. + if kind == RefKind::Frozen { + let bor_t = match bor { + Borrow::Shr(Some(t)) => t, + _ => bug!("Creating illegal borrow {:?} for frozen ref", bor), + }; + trace!("create: Freezing"); + self.frozen_since = Some(bor_t); + } } } } -impl State { - fn increment_clock(&mut self) -> Timestamp { - let val = self.clock; - self.clock = val + 1; - val - } -} - -/// Higher-level operations +/// Higher-level per-location operations: deref, access, reborrow. impl<'tcx> Stacks { - /// `ptr` got used, reflect that in the stack. - fn reactivate( + /// Check that this stack is fine with being dereferenced + fn deref( &self, ptr: Pointer, size: Size, - usage: UsageKind, + kind: RefKind, ) -> EvalResult<'tcx> { - trace!("use_borrow of tag {:?} as {:?}: {:?}, size {}", - ptr.tag, usage, ptr, size.bytes()); + trace!("deref for tag {:?} as {:?}: {:?}, size {}", + ptr.tag, kind, ptr, size.bytes()); let mut stacks = self.stacks.borrow_mut(); + // We need `iter_mut` because `iter` would skip gaps! for stack in stacks.iter_mut(ptr.offset, size) { - stack.reactivate(ptr.tag, usage == UsageKind::Write)?; + stack.deref(ptr.tag, kind).map_err(EvalErrorKind::MachineError)?; } Ok(()) } - /// Create a new borrow, the ptr must already have the new tag. - /// Also freezes the location if `freeze` is set and the tag is a timestamped `Shr`. - fn initiate( + /// `ptr` got used, reflect that in the stack. 
+ fn access( &self, ptr: Pointer, size: Size, - freeze: bool, - ) { - trace!("reborrow for tag {:?}: {:?}, size {}", + is_write: bool, + ) -> EvalResult<'tcx> { + trace!("{} access of tag {:?}: {:?}, size {}", + if is_write { "write" } else { "read" }, ptr.tag, ptr, size.bytes()); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.initiate(ptr.tag); - if freeze { - if let Borrow::Shr(Some(bor_t)) = ptr.tag { - stack.freeze(bor_t); - } - } + stack.access(ptr.tag, is_write)?; } + Ok(()) } - /// Check that this stack is fine with being dereferenced - fn check_deref( + /// Reborrow the given pointer to the new tag for the given kind of reference. + fn reborrow( &self, ptr: Pointer, size: Size, - frozen: bool, + new_bor: Borrow, + new_kind: RefKind, ) -> EvalResult<'tcx> { + trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", + ptr.tag, new_bor, new_kind, ptr, size.bytes()); let mut stacks = self.stacks.borrow_mut(); - // We need `iter_mut` because `iter` would skip gaps! for stack in stacks.iter_mut(ptr.offset, size) { - // Conservatively assume we will just read - if let Err(err) = stack.reactivatable(ptr.tag, /*is_write*/false) { - return err!(MachineError(format!( - "Encountered reference with non-reactivatable tag: {}", - err - ))) - } - // Sometimes we also need to be frozen. - // In this case we *both* push `Shr` and then freeze. This means that a `&mut` - // to `*const` to `*mut` cast through `&` actually works. - if frozen { - // Even shared refs can have uniq tags (after transmute). That's not an error - // but they do not get any freezing benefits. - if let Borrow::Shr(Some(bor_t)) = ptr.tag { - stack.check_frozen(bor_t)?; + // Access source `ptr`, create new ref. 
+ let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(EvalErrorKind::MachineError)?; + if new_kind == RefKind::Raw { + assert!(new_bor.is_shared()); + // Raw references do not get quite as many guarantees as the other kinds: + // If we can deref the new tag already, and if that tag lives higher on + // the stack than the one we come from, just use that. + // IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`. + match (ptr_idx, stack.deref(new_bor, new_kind)) { + // If the new borrow works with the forzen item, or else if it lives + // above the old one in the stack, our job here is done. + (_, Ok(None)) => { + trace!("reborrow-to-raw on a frozen location is a NOP"); + continue + }, + (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => { + trace!("reborrow-to-raw is a NOP because the src ptr already got reborrowed-to-raw"); + continue + }, + _ => {}, } } + // Non-raw reborrows should behave exactly as if we also did a + // read/write to the given location. + stack.access(ptr.tag, new_kind == RefKind::Unique)?; + stack.create(new_bor, new_kind); } Ok(()) } @@ -353,8 +370,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - // Reads behave exactly like the first half of a reborrow-to-shr - alloc.extra.reactivate(ptr, size, UsageKind::Read) + alloc.extra.access(ptr, size, /*is_write*/false) } #[inline(always)] @@ -363,8 +379,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - // Writes behave exactly like the first half of a reborrow-to-mut - alloc.extra.reactivate(ptr, size, UsageKind::Read) + alloc.extra.access(ptr, size, /*is_write*/true) } #[inline(always)] @@ -374,7 +389,7 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { // This is like mutating - alloc.extra.reactivate(ptr, size, UsageKind::Write) + alloc.extra.access(ptr, size, /*is_write*/true) // FIXME: Error out of there are any barriers? 
} } @@ -402,7 +417,7 @@ pub trait EvalContextExt<'tcx> { &self, place: MPlaceTy<'tcx, Borrow>, size: Size, - usage: UsageKind, + mutability: Option, ) -> EvalResult<'tcx, Borrow>; fn tag_new_allocation( @@ -412,10 +427,10 @@ pub trait EvalContextExt<'tcx> { ) -> Borrow; /// Retag an indidual pointer, returning the retagged version. - fn retag_ptr( + fn reborrow( &mut self, ptr: ImmTy<'tcx, Borrow>, - mutbl: hir::Mutability, + mutbl: Mutability, ) -> EvalResult<'tcx, Immediate>; fn retag( @@ -461,7 +476,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { Borrow::Uniq(time) } - /// Called for value-to-place conversion. + /// Called for value-to-place conversion. `mutability` is `None` for raw pointers. /// /// Note that this does NOT mean that all this memory will actually get accessed/referenced! /// We could be in the middle of `&(*var).1`. @@ -469,38 +484,41 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { &self, place: MPlaceTy<'tcx, Borrow>, size: Size, - usage: UsageKind, + mutability: Option, ) -> EvalResult<'tcx, Borrow> { - trace!("tag_dereference: Accessing reference ({:?}) for {:?} (pointee {})", - usage, place.ptr, place.layout.ty); + trace!("tag_dereference: Accessing {} reference for {:?} (pointee {})", + if let Some(mutability) = mutability { format!("{:?}", mutability) } else { format!("raw") }, + place.ptr, place.layout.ty); let ptr = place.ptr.to_ptr()?; // In principle we should not have to do anything here. However, with transmutes involved, - // it can happen that the tag of `ptr` does not actually match `usage`, and we + // it can happen that the tag of `ptr` does not actually match `mutability`, and we // should adjust for that. // Notably, the compiler can introduce such transmutes by optimizing away `&[mut]*`. // That can transmute a raw ptr to a (shared/mut) ref, and a mut ref to a shared one. 
- match (usage, ptr.tag) { - (UsageKind::Raw, _) => { + match (mutability, ptr.tag) { + (None, _) => { // Don't use the tag, this is a raw access! They should happen tagless. + // This is needed for `*mut` to make any sense: Writes *do* enforce the + // `Uniq` tag to be up top, but we must make sure raw writes do not do that. // This does mean, however, that `&*foo` is *not* a NOP *if* `foo` is a raw ptr. // Also don't do any further validation, this is raw after all. return Ok(Borrow::default()); } - (UsageKind::Write, Borrow::Uniq(_)) | - (UsageKind::Read, Borrow::Shr(_)) => { + (Some(MutMutable), Borrow::Uniq(_)) | + (Some(MutImmutable), Borrow::Shr(_)) => { // Expected combinations. Nothing to do. } - (UsageKind::Write, Borrow::Shr(None)) => { + (Some(MutMutable), Borrow::Shr(None)) => { // Raw transmuted to mut ref. Keep this as raw access. // We cannot reborrow here; there might be a raw in `&(*var).1` where // `var` is an `&mut`. The other field of the struct might be already frozen, // also using `var`, and that would be okay. } - (UsageKind::Read, Borrow::Uniq(_)) => { + (Some(MutImmutable), Borrow::Uniq(_)) => { // A mut got transmuted to shr. Can happen even from compiler transformations: // `&*x` gets optimized to `x` even when `x` is a `&mut`. } - (UsageKind::Write, Borrow::Shr(Some(_))) => { + (Some(MutMutable), Borrow::Shr(Some(_))) => { // This is just invalid: A shr got transmuted to a mut. // If we ever allow this, we have to consider what we do when a turn a // `Raw`-tagged `&mut` into a raw pointer pointing to a frozen location. @@ -515,13 +533,16 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); // If we got here, we do some checking, *but* we leave the tag unchanged. 
if let Borrow::Shr(Some(_)) = ptr.tag { + assert_eq!(mutability, Some(MutImmutable)); // We need a frozen-sensitive check self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - alloc.extra.check_deref(cur_ptr, size, frozen) + let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; + alloc.extra.deref(cur_ptr, size, kind) })?; } else { // Just treat this as one big chunk - alloc.extra.check_deref(ptr, size, /*frozen*/false)?; + let kind = if mutability == Some(MutMutable) { RefKind::Unique } else { RefKind::Raw }; + alloc.extra.deref(ptr, size, kind)?; } // All is good, and do not change the tag @@ -534,23 +555,20 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { place: MPlaceTy<'tcx, Borrow>, size: Size, ) -> EvalResult<'tcx> { - trace!("self: {:?} is now accessible by raw pointers", *place); + trace!("escape_to_raw: {:?} is now accessible by raw pointers", *place); // Get the allocation - let mut ptr = place.ptr.to_ptr()?; + let ptr = place.ptr.to_ptr()?; self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); // Re-borrow to raw. This is a NOP for shared borrows, but we do not know the borrow // type here and that's also okay. Freezing does not matter here. - alloc.extra.reactivate(ptr, size, UsageKind::Raw)?; - ptr.tag = Borrow::default(); - alloc.extra.initiate(ptr, size, /*freeze*/false); - Ok(()) + alloc.extra.reborrow(ptr, size, Borrow::default(), RefKind::Raw) } - fn retag_ptr( + fn reborrow( &mut self, val: ImmTy<'tcx, Borrow>, - mutbl: hir::Mutability, + mutbl: Mutability, ) -> EvalResult<'tcx, Immediate> { // We want a place for where the ptr *points to*, so we get one. 
let place = self.ref_to_mplace(val)?; @@ -566,30 +584,29 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { let ptr = place.ptr.to_ptr()?; let time = self.machine.stacked_borrows.increment_clock(); let new_bor = match mutbl { - hir::MutMutable => Borrow::Uniq(time), - hir::MutImmutable => Borrow::Shr(Some(time)), + MutMutable => Borrow::Uniq(time), + MutImmutable => Borrow::Shr(Some(time)), }; - let new_ptr = Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor); - trace!("retag: Creating new reference ({:?}) for {:?} (pointee {}): {:?}", + trace!("reborrow: Creating new {:?} reference for {:?} (pointee {}): {:?}", mutbl, ptr, place.layout.ty, new_bor); - // Get the allocation - self.memory().check_bounds(ptr, size, false)?; // `ptr_dereference` wouldn't do any checks if this is a raw ptr + // Get the allocation. It might not be mutable, so we cannot use `get_mut`. + self.memory().check_bounds(ptr, size, false)?; let alloc = self.memory().get(ptr.alloc_id).expect("We checked that the ptr is fine!"); - // Update the stacks. First use old borrow, then initiate new one. - alloc.extra.reactivate(ptr, size, Some(mutbl).into())?; - if mutbl == hir::MutImmutable { - // We need a frozen-sensitive initiate - self.visit_freeze_sensitive(place, size, |mut cur_ptr, size, frozen| { - cur_ptr.tag = new_bor; - Ok(alloc.extra.initiate(cur_ptr, size, frozen)) + // Update the stacks. + if mutbl == MutImmutable { + // Shared reference. We need a frozen-sensitive reborrow. + self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { + let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; + alloc.extra.reborrow(cur_ptr, size, new_bor, kind) })?; } else { - // Just treat this as one big chunk - alloc.extra.initiate(new_ptr, size, /*frozen*/false); + // Mutable reference. Just treat this as one big chunk. 
+ alloc.extra.reborrow(ptr, size, new_bor, RefKind::Unique)?; } // Return new ptr + let new_ptr = Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor); let new_place = MemPlace { ptr: Scalar::Ptr(new_ptr), ..*place }; Ok(new_place.to_ref()) } @@ -608,7 +625,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { }; // Retag the pointer and write it back. let val = self.read_immediate(self.place_to_op(place)?)?; - let val = self.retag_ptr(val, mutbl)?; + let val = self.reborrow(val, mutbl)?; self.write_immediate(val, place)?; Ok(()) } diff --git a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs b/tests/compile-fail/stacked_borrows/alias_through_mutation.rs index 0b2d459366..092f3f09ed 100644 --- a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs +++ b/tests/compile-fail/stacked_borrows/alias_through_mutation.rs @@ -11,5 +11,5 @@ fn main() { retarget(&mut target_alias, target); // now `target_alias` points to the same thing as `target` *target = 13; - let _val = *target_alias; //~ ERROR reference with non-reactivatable tag + let _val = *target_alias; //~ ERROR does not exist on the stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_read5.rs b/tests/compile-fail/stacked_borrows/illegal_read5.rs new file mode 100644 index 0000000000..863649a47b --- /dev/null +++ b/tests/compile-fail/stacked_borrows/illegal_read5.rs @@ -0,0 +1,16 @@ +// We *can* have aliasing &RefCell and &mut T, but we cannot read through the former. +// Else we couldn't optimize based on the assumption that `xref` below is truly unique. 
+ +use std::cell::RefCell; +use std::{mem, ptr}; + +fn main() { + let rc = RefCell::new(0); + let mut refmut = rc.borrow_mut(); + let xref: &mut i32 = &mut *refmut; + let xshr = &rc; // creating this is okay + let _val = *xref; // we can even still use our mutable reference + mem::forget(unsafe { ptr::read(xshr) }); // but after reading through the shared ref + let _val = *xref; // the mutable one is dead and gone + //~^ ERROR does not exist on the stack +} diff --git a/tests/compile-fail/stacked_borrows/illegal_write1.rs b/tests/compile-fail/stacked_borrows/illegal_write1.rs index 7378907fa7..b106cc8dc4 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write1.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write1.rs @@ -8,5 +8,5 @@ fn main() { let target = Box::new(42); // has an implicit raw let ref_ = &*target; evil(ref_); // invalidates shared ref, activates raw - let _x = *ref_; //~ ERROR is not frozen long enough + let _x = *ref_; //~ ERROR is not frozen } diff --git a/tests/compile-fail/stacked_borrows/illegal_write3.rs b/tests/compile-fail/stacked_borrows/illegal_write3.rs index c82da1e9c4..01559af21e 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write3.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write3.rs @@ -4,5 +4,5 @@ fn main() { let r#ref = ⌖ // freeze let ptr = r#ref as *const _ as *mut _; // raw ptr, with raw tag unsafe { *ptr = 42; } - let _val = *r#ref; //~ ERROR is not frozen long enough + let _val = *r#ref; //~ ERROR is not frozen } diff --git a/tests/compile-fail/stacked_borrows/illegal_write4.rs b/tests/compile-fail/stacked_borrows/illegal_write4.rs index 49bf9279fa..37ae0f055f 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write4.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write4.rs @@ -9,5 +9,5 @@ fn main() { let ptr = reference as *const _ as *mut i32; // raw ptr, with raw tag let _mut_ref: &mut i32 = unsafe { mem::transmute(ptr) }; // &mut, with raw tag // Now we retag, making our ref 
top-of-stack -- and, in particular, unfreezing. - let _val = *reference; //~ ERROR is not frozen long enough + let _val = *reference; //~ ERROR is not frozen } diff --git a/tests/compile-fail/stacked_borrows/illegal_write5.rs b/tests/compile-fail/stacked_borrows/illegal_write5.rs index f4704ad571..57b2ca87d8 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write5.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write5.rs @@ -7,7 +7,7 @@ fn main() { let xref = unsafe { &mut *xraw }; // derived from raw, so using raw is still okay... callee(xraw); let _val = *xref; // ...but any use of raw will invalidate our ref. - //~^ ERROR: reference with non-reactivatable tag + //~^ ERROR: does not exist on the stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs b/tests/compile-fail/stacked_borrows/load_invalid_mut.rs index d3db462343..98b9451eda 100644 --- a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/load_invalid_mut.rs @@ -4,6 +4,6 @@ fn main() { let xraw = x as *mut _; let xref = unsafe { &mut *xraw }; let xref_in_mem = Box::new(xref); - let _val = *x; // invalidate xraw + let _val = unsafe { *xraw }; // invalidate xref let _val = *xref_in_mem; //~ ERROR does not exist on the stack } diff --git a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs b/tests/compile-fail/stacked_borrows/load_invalid_shr.rs index 71b578817a..6599924f0f 100644 --- a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/load_invalid_shr.rs @@ -4,6 +4,6 @@ fn main() { let xraw = x as *mut _; let xref = unsafe { &*xraw }; let xref_in_mem = Box::new(xref); - *x = 42; // invalidate xraw - let _val = *xref_in_mem; //~ ERROR does not exist on the stack + unsafe { *xraw = 42 }; // unfreeze + let _val = *xref_in_mem; //~ ERROR is not frozen } diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs 
b/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs index 41cf89d874..28288c6c63 100644 --- a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs @@ -5,6 +5,6 @@ fn main() { let x = &mut 42; let xraw = x as *mut _; let xref = unsafe { &mut *xraw }; - let _val = *x; // invalidate xraw + let _val = unsafe { *xraw }; // invalidate xref foo(xref); //~ ERROR does not exist on the stack } diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs b/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs index 0bdb1b4a41..67bbc88e40 100644 --- a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs @@ -3,8 +3,8 @@ fn foo(_: &i32) {} fn main() { let x = &mut 42; - let xraw = &*x as *const _; + let xraw = &*x as *const _ as *mut _; let xref = unsafe { &*xraw }; - *x = 42; // invalidate xraw - foo(xref); //~ ERROR does not exist on the stack + unsafe { *xraw = 42 }; // unfreeze + foo(xref); //~ ERROR is not frozen } diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut.rs index aef8fafdf5..e7f0b9bc9d 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut.rs @@ -2,7 +2,7 @@ fn foo(x: &mut (i32, i32)) -> &mut i32 { let xraw = x as *mut (i32, i32); let ret = unsafe { &mut (*xraw).1 }; - let _val = *x; // invalidate xraw and its children + let _val = unsafe { *xraw }; // invalidate xref ret //~ ERROR does not exist on the stack } diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr.rs index 074942eb95..986dd18b2e 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_shr.rs @@ -2,8 +2,8 @@ fn foo(x: &mut (i32, i32)) -> &i32 { let xraw = x as *mut (i32, 
i32); let ret = unsafe { &(*xraw).1 }; - x.1 = 42; // invalidate xraw on the 2nd field - ret //~ ERROR does not exist on the stack + unsafe { *xraw = (42, 23) }; // unfreeze + ret //~ ERROR is not frozen } fn main() { diff --git a/tests/compiletest.rs b/tests/compiletest.rs index 55eaa7bfba..f393fcb2c2 100644 --- a/tests/compiletest.rs +++ b/tests/compiletest.rs @@ -99,7 +99,9 @@ fn miri_pass(sysroot: &Path, path: &str, target: &str, host: &str, need_fullmir: flags.push(format!("--sysroot {}", sysroot.display())); flags.push("-Dwarnings -Dunused".to_owned()); // overwrite the -Aunused in compiletest-rs if opt { - flags.push("-Zmir-opt-level=3".to_owned()); + // FIXME: We use opt level 1 because MIR inlining defeats the validation + // whitelist. + flags.push("-Zmir-opt-level=1".to_owned()); } let mut config = mk_config("ui"); diff --git a/tests/run-pass/refcell.rs b/tests/run-pass/refcell.rs index 52dcdbd36d..5f2f3523b9 100644 --- a/tests/run-pass/refcell.rs +++ b/tests/run-pass/refcell.rs @@ -32,6 +32,23 @@ fn lots_of_funny_borrows() { } } +fn aliasing_mut_and_shr() { + fn inner(rc: &RefCell, aliasing: &mut i32) { + *aliasing += 4; + let _escape_to_raw = rc as *const _; + *aliasing += 4; + let _shr = &*rc; + *aliasing += 4; + } + + let rc = RefCell::new(23); + let mut bmut = rc.borrow_mut(); + inner(&rc, &mut *bmut); + drop(bmut); + assert_eq!(*rc.borrow(), 23+12); +} + fn main() { lots_of_funny_borrows(); + aliasing_mut_and_shr(); } From 5a801c0dc1620948108e23ab9c75b75137eb7691 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 12 Nov 2018 16:18:02 +0100 Subject: [PATCH 08/15] adjust comment --- src/stacked_borrows.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index ab536b5785..f5ae7cb933 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -509,10 +509,8 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { // Expected combinations. 
Nothing to do. } (Some(MutMutable), Borrow::Shr(None)) => { - // Raw transmuted to mut ref. Keep this as raw access. - // We cannot reborrow here; there might be a raw in `&(*var).1` where - // `var` is an `&mut`. The other field of the struct might be already frozen, - // also using `var`, and that would be okay. + // Raw transmuted to mut ref. This is something real unsafe code does. + // We cannot reborrow here because we do not want to mutate state on a deref. } (Some(MutImmutable), Borrow::Uniq(_)) => { // A mut got transmuted to shr. Can happen even from compiler transformations: From ba8eb7608ea6520238a1d587197a795e9dc7147a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 12 Nov 2018 19:30:35 +0100 Subject: [PATCH 09/15] add an interesting demo for &mut being unique --- .../mut_exclusive_violation1.rs | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs diff --git a/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs b/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs new file mode 100644 index 0000000000..255e35b145 --- /dev/null +++ b/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs @@ -0,0 +1,29 @@ +fn demo_mut_advanced_unique(our: &mut i32) -> i32 { + unknown_code_1(&*our); + + // This "re-asserts" uniqueness of the reference: After writing, we know + // our tag is at the top of the stack. 
+ *our = 5; + + unknown_code_2(); + + // We know this will return 5 + *our +} + +// Now comes the evil context +use std::ptr; + +static mut LEAK: *mut i32 = ptr::null_mut(); + +fn unknown_code_1(x: &i32) { unsafe { + LEAK = x as *const _ as *mut _; +} } + +fn unknown_code_2() { unsafe { + *LEAK = 7; //~ ERROR does not exist on the stack +} } + +fn main() { + assert_eq!(demo_mut_advanced_unique(&mut 0), 5); +} From c234009fddf3a58c486a06f1c40980149df114f7 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 13 Nov 2018 13:39:03 +0100 Subject: [PATCH 10/15] generalize reborrow-to-raw exception to a general redundancy check --- src/stacked_borrows.rs | 38 +++++++++++++++++--------------------- 1 file changed, 17 insertions(+), 21 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index f5ae7cb933..d2abfbc9df 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -333,28 +333,24 @@ impl<'tcx> Stacks { for stack in stacks.iter_mut(ptr.offset, size) { // Access source `ptr`, create new ref. let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(EvalErrorKind::MachineError)?; - if new_kind == RefKind::Raw { - assert!(new_bor.is_shared()); - // Raw references do not get quite as many guarantees as the other kinds: - // If we can deref the new tag already, and if that tag lives higher on - // the stack than the one we come from, just use that. - // IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`. - match (ptr_idx, stack.deref(new_bor, new_kind)) { - // If the new borrow works with the forzen item, or else if it lives - // above the old one in the stack, our job here is done. 
- (_, Ok(None)) => { - trace!("reborrow-to-raw on a frozen location is a NOP"); - continue - }, - (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => { - trace!("reborrow-to-raw is a NOP because the src ptr already got reborrowed-to-raw"); - continue - }, - _ => {}, - } + // If we can deref the new tag already, and if that tag lives higher on + // the stack than the one we come from, just use that. + // IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`. + // This also checks frozenness, if required. + let bor_already_happened = match (ptr_idx, stack.deref(new_bor, new_kind)) { + // If the new borrow works with the frozen item, or else if it lives + // above the old one in the stack, our job here is done. + (_, Ok(None)) => true, + (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true, + // Otherwise we need to create a new borrow. + _ => false, + }; + if bor_already_happened { + assert!(new_bor.is_shared(), "A unique reborrow can never be redundant"); + trace!("Reborrow is a NOP"); + continue; } - // Non-raw reborrows should behave exactly as if we also did a - // read/write to the given location. + // We need to do some actual work. stack.access(ptr.tag, new_kind == RefKind::Unique)?; stack.create(new_bor, new_kind); } From f521fd5e0fbaa54d5a07f61a794a91a0576ff557 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 13 Nov 2018 14:49:04 +0100 Subject: [PATCH 11/15] let's call this a redundant reborrow --- src/stacked_borrows.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index d2abfbc9df..8c40903016 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -337,7 +337,7 @@ impl<'tcx> Stacks { // the stack than the one we come from, just use that. // IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`. // This also checks frozenness, if required. 
- let bor_already_happened = match (ptr_idx, stack.deref(new_bor, new_kind)) { + let bor_redundant = match (ptr_idx, stack.deref(new_bor, new_kind)) { // If the new borrow works with the frozen item, or else if it lives // above the old one in the stack, our job here is done. (_, Ok(None)) => true, @@ -345,9 +345,9 @@ impl<'tcx> Stacks { // Otherwise we need to create a new borrow. _ => false, }; - if bor_already_happened { + if bor_redundant { assert!(new_bor.is_shared(), "A unique reborrow can never be redundant"); - trace!("Reborrow is a NOP"); + trace!("reborrow is redundant"); continue; } // We need to do some actual work. From cf1746222ee01eefb8c076fd48f05755b4cdc81a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 13 Nov 2018 17:05:47 +0100 Subject: [PATCH 12/15] we no longer even try pushing to a frozen location --- src/stacked_borrows.rs | 55 ++++++++++++++++++------------------------ 1 file changed, 23 insertions(+), 32 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 8c40903016..3790b5d13f 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -245,40 +245,31 @@ impl<'tcx> Stack { fn create(&mut self, bor: Borrow, kind: RefKind) { // First, push the item. We do this even if we will later freeze, because we // will allow mutation of shared data at the expense of unfreezing. - if let Some(itm_t) = self.frozen_since { - // A frozen location, we won't change anything here! - match bor { - Borrow::Uniq(_) => bug!("Trying to create unique ref to frozen location"), - Borrow::Shr(Some(bor_t)) if kind == RefKind::Frozen => { - // Make sure we are frozen long enough. This is part 1 of ensuring F1. - assert!(itm_t <= bor_t, "Trying to freeze shorter than it was frozen?"); - trace!("create: Freezing a frozen location is a NOP"); - } - Borrow::Shr(_) => trace!("create: Sharing a frozen location is a NOP"), - } + if self.frozen_since.is_some() { + // A frozen location, this should be impossible! 
+ bug!("We should never try pushing to a frozen stack"); + } + // First, push. + let itm = match bor { + Borrow::Uniq(t) => BorStackItem::Uniq(t), + Borrow::Shr(_) => BorStackItem::Shr, + }; + if *self.borrows.last().unwrap() == itm { + assert!(bor.is_shared()); + trace!("create: Sharing a shared location is a NOP"); } else { - // First push. - let itm = match bor { - Borrow::Uniq(t) => BorStackItem::Uniq(t), - Borrow::Shr(_) => BorStackItem::Shr, + // This ensures U1. + trace!("create: Pushing {:?}", itm); + self.borrows.push(itm); + } + // Then, maybe freeze. This is part 2 of ensuring F1. + if kind == RefKind::Frozen { + let bor_t = match bor { + Borrow::Shr(Some(t)) => t, + _ => bug!("Creating illegal borrow {:?} for frozen ref", bor), }; - if *self.borrows.last().unwrap() == itm { - assert!(bor.is_shared()); - trace!("create: Sharing a shared location is a NOP"); - } else { - // This ensures U1. - trace!("create: Pushing {:?}", itm); - self.borrows.push(itm); - } - // Now, maybe freeze. This is part 2 of ensuring F1. 
- if kind == RefKind::Frozen { - let bor_t = match bor { - Borrow::Shr(Some(t)) => t, - _ => bug!("Creating illegal borrow {:?} for frozen ref", bor), - }; - trace!("create: Freezing"); - self.frozen_since = Some(bor_t); - } + trace!("create: Freezing"); + self.frozen_since = Some(bor_t); } } } From 60e26af3236480a73c7116a20fce49e4e8f74b5e Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 16 Nov 2018 08:40:00 +0100 Subject: [PATCH 13/15] add a sanity assertion --- src/stacked_borrows.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 3790b5d13f..f564bf9ab7 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -318,6 +318,7 @@ impl<'tcx> Stacks { new_bor: Borrow, new_kind: RefKind, ) -> EvalResult<'tcx> { + assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique); trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", ptr.tag, new_bor, new_kind, ptr, size.bytes()); let mut stacks = self.stacks.borrow_mut(); From 4e34457715cd06eee13b49806e1d157e1add9a66 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 16 Nov 2018 08:40:08 +0100 Subject: [PATCH 14/15] bump Rust --- rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-version b/rust-version index a829ec0f5a..6c3858ccc9 100644 --- a/rust-version +++ b/rust-version @@ -1 +1 @@ -nightly-2018-11-15 +nightly-2018-11-16 From 827e5180f20ebeec990ab8f92a4196ad8c5feb19 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 16 Nov 2018 10:01:54 +0100 Subject: [PATCH 15/15] stacked borrows is broken without full MIR --- README.md | 13 +++++++++---- .../copy_nonoverlapping.rs | 0 .../memleak_rc.rs | 0 .../stacked_borrows/alias_through_mutation.rs | 0 .../stacked_borrows/aliasing_mut1.rs | 0 .../stacked_borrows/aliasing_mut2.rs | 0 .../stacked_borrows/aliasing_mut3.rs | 0 .../stacked_borrows/aliasing_mut4.rs | 0 .../stacked_borrows/buggy_as_mut_slice.rs | 0 .../stacked_borrows/buggy_split_at_mut.rs | 0 
.../stacked_borrows/illegal_read1.rs | 0 .../stacked_borrows/illegal_read2.rs | 0 .../stacked_borrows/illegal_read3.rs | 0 .../stacked_borrows/illegal_read4.rs | 0 .../stacked_borrows/illegal_read5.rs | 0 .../stacked_borrows/illegal_write1.rs | 0 .../stacked_borrows/illegal_write2.rs | 0 .../stacked_borrows/illegal_write3.rs | 0 .../stacked_borrows/illegal_write4.rs | 0 .../stacked_borrows/illegal_write5.rs | 0 .../stacked_borrows/load_invalid_mut.rs | 0 .../stacked_borrows/load_invalid_shr.rs | 0 .../stacked_borrows/mut_exclusive_violation1.rs | 0 .../stacked_borrows/outdated_local.rs | 0 .../stacked_borrows/pass_invalid_mut.rs | 0 .../stacked_borrows/pass_invalid_shr.rs | 0 .../stacked_borrows/pointer_smuggling.rs | 0 .../stacked_borrows/return_invalid_mut.rs | 0 .../stacked_borrows/return_invalid_shr.rs | 0 .../stacked_borrows/static_memory_modification.rs | 0 .../stacked_borrows/transmute-is-no-escape.rs | 0 .../stacked_borrows/unescaped_local.rs | 0 .../transmute-pair-undef.rs | 0 tests/compiletest.rs | 4 ++++ 34 files changed, 13 insertions(+), 4 deletions(-) rename tests/{compile-fail => compile-fail-fullmir}/copy_nonoverlapping.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/memleak_rc.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/alias_through_mutation.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/aliasing_mut1.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/aliasing_mut2.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/aliasing_mut3.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/aliasing_mut4.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/buggy_as_mut_slice.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/buggy_split_at_mut.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_read1.rs (100%) rename 
tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_read2.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_read3.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_read4.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_read5.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_write1.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_write2.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_write3.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_write4.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/illegal_write5.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/load_invalid_mut.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/load_invalid_shr.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/mut_exclusive_violation1.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/outdated_local.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/pass_invalid_mut.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/pass_invalid_shr.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/pointer_smuggling.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/return_invalid_mut.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/return_invalid_shr.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/static_memory_modification.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/transmute-is-no-escape.rs (100%) rename tests/{compile-fail => compile-fail-fullmir}/stacked_borrows/unescaped_local.rs (100%) rename 
tests/{compile-fail => compile-fail-fullmir}/transmute-pair-undef.rs (100%) diff --git a/README.md b/README.md index 04a263597c..5eb7b34425 100644 --- a/README.md +++ b/README.md @@ -45,14 +45,19 @@ in this directory. ## Running Miri ```sh -cargo +nightly run tests/run-pass/vecs.rs # Or whatever test you like. +cargo +nightly run -- -Zmiri-disable-validation tests/run-pass/vecs.rs # Or whatever test you like. ``` +We have to disable validation because that can lead to errors when libstd is not +compiled the right way. + ## Running Miri with full libstd -Per default libstd does not contain the MIR of non-polymorphic functions. When -Miri hits a call to such a function, execution terminates. To fix this, it is -possible to compile libstd with full MIR: +Per default libstd does not contain the MIR of non-polymorphic functions, and +also does not contain some extra MIR statements that miri needs for validation. +When Miri hits a call to such a function, execution terminates, and even when +the MIR is present, validation can fail. 
To fix this, it is possible to compile +libstd with full MIR: ```sh rustup component add --toolchain nightly rust-src diff --git a/tests/compile-fail/copy_nonoverlapping.rs b/tests/compile-fail-fullmir/copy_nonoverlapping.rs similarity index 100% rename from tests/compile-fail/copy_nonoverlapping.rs rename to tests/compile-fail-fullmir/copy_nonoverlapping.rs diff --git a/tests/compile-fail/memleak_rc.rs b/tests/compile-fail-fullmir/memleak_rc.rs similarity index 100% rename from tests/compile-fail/memleak_rc.rs rename to tests/compile-fail-fullmir/memleak_rc.rs diff --git a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs b/tests/compile-fail-fullmir/stacked_borrows/alias_through_mutation.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/alias_through_mutation.rs rename to tests/compile-fail-fullmir/stacked_borrows/alias_through_mutation.rs diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut1.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut1.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/aliasing_mut1.rs rename to tests/compile-fail-fullmir/stacked_borrows/aliasing_mut1.rs diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut2.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut2.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/aliasing_mut2.rs rename to tests/compile-fail-fullmir/stacked_borrows/aliasing_mut2.rs diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut3.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut3.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/aliasing_mut3.rs rename to tests/compile-fail-fullmir/stacked_borrows/aliasing_mut3.rs diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut4.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut4.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/aliasing_mut4.rs rename to 
tests/compile-fail-fullmir/stacked_borrows/aliasing_mut4.rs diff --git a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs b/tests/compile-fail-fullmir/stacked_borrows/buggy_as_mut_slice.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs rename to tests/compile-fail-fullmir/stacked_borrows/buggy_as_mut_slice.rs diff --git a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs b/tests/compile-fail-fullmir/stacked_borrows/buggy_split_at_mut.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs rename to tests/compile-fail-fullmir/stacked_borrows/buggy_split_at_mut.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_read1.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_read1.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_read1.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_read1.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_read2.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_read2.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_read2.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_read2.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_read3.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_read3.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_read3.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_read3.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_read4.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_read4.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_read4.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_read4.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_read5.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_read5.rs similarity index 100% rename from 
tests/compile-fail/stacked_borrows/illegal_read5.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_read5.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_write1.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_write1.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_write1.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_write1.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_write2.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_write2.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_write2.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_write2.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_write3.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_write3.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_write3.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_write3.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_write4.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_write4.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_write4.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_write4.rs diff --git a/tests/compile-fail/stacked_borrows/illegal_write5.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_write5.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/illegal_write5.rs rename to tests/compile-fail-fullmir/stacked_borrows/illegal_write5.rs diff --git a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs b/tests/compile-fail-fullmir/stacked_borrows/load_invalid_mut.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/load_invalid_mut.rs rename to tests/compile-fail-fullmir/stacked_borrows/load_invalid_mut.rs diff --git a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs b/tests/compile-fail-fullmir/stacked_borrows/load_invalid_shr.rs similarity 
index 100% rename from tests/compile-fail/stacked_borrows/load_invalid_shr.rs rename to tests/compile-fail-fullmir/stacked_borrows/load_invalid_shr.rs diff --git a/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs b/tests/compile-fail-fullmir/stacked_borrows/mut_exclusive_violation1.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs rename to tests/compile-fail-fullmir/stacked_borrows/mut_exclusive_violation1.rs diff --git a/tests/compile-fail/stacked_borrows/outdated_local.rs b/tests/compile-fail-fullmir/stacked_borrows/outdated_local.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/outdated_local.rs rename to tests/compile-fail-fullmir/stacked_borrows/outdated_local.rs diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs b/tests/compile-fail-fullmir/stacked_borrows/pass_invalid_mut.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/pass_invalid_mut.rs rename to tests/compile-fail-fullmir/stacked_borrows/pass_invalid_mut.rs diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs b/tests/compile-fail-fullmir/stacked_borrows/pass_invalid_shr.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/pass_invalid_shr.rs rename to tests/compile-fail-fullmir/stacked_borrows/pass_invalid_shr.rs diff --git a/tests/compile-fail/stacked_borrows/pointer_smuggling.rs b/tests/compile-fail-fullmir/stacked_borrows/pointer_smuggling.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/pointer_smuggling.rs rename to tests/compile-fail-fullmir/stacked_borrows/pointer_smuggling.rs diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs b/tests/compile-fail-fullmir/stacked_borrows/return_invalid_mut.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/return_invalid_mut.rs rename to tests/compile-fail-fullmir/stacked_borrows/return_invalid_mut.rs diff --git 
a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs b/tests/compile-fail-fullmir/stacked_borrows/return_invalid_shr.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/return_invalid_shr.rs rename to tests/compile-fail-fullmir/stacked_borrows/return_invalid_shr.rs diff --git a/tests/compile-fail/stacked_borrows/static_memory_modification.rs b/tests/compile-fail-fullmir/stacked_borrows/static_memory_modification.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/static_memory_modification.rs rename to tests/compile-fail-fullmir/stacked_borrows/static_memory_modification.rs diff --git a/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs b/tests/compile-fail-fullmir/stacked_borrows/transmute-is-no-escape.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs rename to tests/compile-fail-fullmir/stacked_borrows/transmute-is-no-escape.rs diff --git a/tests/compile-fail/stacked_borrows/unescaped_local.rs b/tests/compile-fail-fullmir/stacked_borrows/unescaped_local.rs similarity index 100% rename from tests/compile-fail/stacked_borrows/unescaped_local.rs rename to tests/compile-fail-fullmir/stacked_borrows/unescaped_local.rs diff --git a/tests/compile-fail/transmute-pair-undef.rs b/tests/compile-fail-fullmir/transmute-pair-undef.rs similarity index 100% rename from tests/compile-fail/transmute-pair-undef.rs rename to tests/compile-fail-fullmir/transmute-pair-undef.rs diff --git a/tests/compiletest.rs b/tests/compiletest.rs index f393fcb2c2..7ecf64590b 100644 --- a/tests/compiletest.rs +++ b/tests/compiletest.rs @@ -103,6 +103,10 @@ fn miri_pass(sysroot: &Path, path: &str, target: &str, host: &str, need_fullmir: // whitelist. 
flags.push("-Zmir-opt-level=1".to_owned()); } + if !have_fullmir() { + // Validation relies on the EscapeToRaw statements being emitted + flags.push("-Zmiri-disable-validation".to_owned()); + } let mut config = mk_config("ui"); config.src_base = PathBuf::from(path);