
Commit 4b6f386

Make interning explicitly care about types and the mutability of memory
1 parent 8d5728a commit 4b6f386

18 files changed (+668 -172 lines)
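The core of the change is the new `intern_const_alloc_recursive` entry point that `const_eval.rs` switches to below. Its declaration lives in one of the other changed files and is not part of this excerpt; inferred purely from the call site in `eval_body_using_ecx`, it plausibly has a shape along the following lines. This is a sketch under that assumption, not the actual code from the commit.

// Sketch only: signature guessed from the call site below, body elided.
// Presumably the function walks the result allocation and everything reachable
// through its relocations, interning each allocation with a mutability derived
// from the type (e.g. `is_freeze`) and from whether the item is a mutable static.
pub fn intern_const_alloc_recursive<'mir, 'tcx>(
    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
    def_id: DefId,
    ret: MPlaceTy<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
) -> InterpResult<'tcx> {
    // actual implementation is in one of the 18 changed files, not shown here
    unimplemented!()
}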

src/librustc_mir/const_eval.rs

Lines changed: 53 additions & 44 deletions
@@ -9,7 +9,7 @@ use std::convert::TryInto;
 
 use rustc::hir::def::DefKind;
 use rustc::hir::def_id::DefId;
-use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
+use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
 use rustc::mir;
 use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
 use rustc::ty::layout::{self, LayoutOf, VariantIdx};
@@ -18,15 +18,14 @@ use rustc::traits::Reveal;
 use rustc::util::common::ErrorReported;
 use rustc_data_structures::fx::FxHashMap;
 
-use syntax::ast::Mutability;
 use syntax::source_map::{Span, DUMMY_SP};
 
 use crate::interpret::{self,
-    PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
+    PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar,
     RawConst, ConstValue,
     InterpResult, InterpErrorInfo, InterpError, GlobalId, InterpretCx, StackPopCleanup,
     Allocation, AllocId, MemoryKind,
-    snapshot, RefTracking,
+    snapshot, RefTracking, intern_const_alloc_recursive,
 };
 
 /// Number of steps until the detector even starts doing anything.
@@ -63,33 +62,19 @@ pub(crate) fn eval_promoted<'mir, 'tcx>(
     eval_body_using_ecx(&mut ecx, cid, body, param_env)
 }
 
-fn mplace_to_const<'tcx>(
-    ecx: &CompileTimeEvalContext<'_, 'tcx>,
-    mplace: MPlaceTy<'tcx>,
-) -> &'tcx ty::Const<'tcx> {
-    let MemPlace { ptr, align, meta } = *mplace;
-    // extract alloc-offset pair
-    assert!(meta.is_none());
-    let ptr = ptr.to_ptr().unwrap();
-    let alloc = ecx.memory.get(ptr.alloc_id).unwrap();
-    assert!(alloc.align >= align);
-    assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= mplace.layout.size.bytes());
-    let mut alloc = alloc.clone();
-    alloc.align = align;
-    // FIXME shouldn't it be the case that `mark_static_initialized` has already
-    // interned this? I thought that is the entire point of that `FinishStatic` stuff?
-    let alloc = ecx.tcx.intern_const_alloc(alloc);
-    let val = ConstValue::ByRef(ptr, alloc);
-    ecx.tcx.mk_const(ty::Const { val, ty: mplace.layout.ty })
-}
-
 fn op_to_const<'tcx>(
     ecx: &CompileTimeEvalContext<'_, 'tcx>,
     op: OpTy<'tcx>,
 ) -> &'tcx ty::Const<'tcx> {
-    // We do not normalize just any data. Only non-union scalars and slices.
-    let normalize = match op.layout.abi {
-        layout::Abi::Scalar(..) => op.layout.ty.ty_adt_def().map_or(true, |adt| !adt.is_union()),
+    // We do not have value optmizations for everything.
+    // Only scalars and slices, since they are very common.
+    // Note that further down we turn scalars of undefined bits back to `ByRef`. These can result
+    // from scalar unions that are initialized with one of their zero sized variants. We could
+    // instead allow `ConstValue::Scalar` to store `ScalarMaybeUndef`, but that would affect all
+    // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
+    // `Undef` situation.
+    let try_as_immediate = match op.layout.abi {
+        layout::Abi::Scalar(..) => true,
         layout::Abi::ScalarPair(..) => match op.layout.ty.sty {
             ty::Ref(_, inner, _) => match inner.sty {
                 ty::Slice(elem) => elem == ecx.tcx.types.u8,
@@ -100,16 +85,38 @@ fn op_to_const<'tcx>(
         },
         _ => false,
     };
-    let normalized_op = if normalize {
-        Err(*ecx.read_immediate(op).expect("normalization works on validated constants"))
+    let immediate = if try_as_immediate {
+        Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
     } else {
+        // It is guaranteed that any non-slice scalar pair is actually ByRef here.
+        // When we come back from raw const eval, we are always by-ref. The only way our op here is
+        // by-val is if we are in const_field, i.e., if this is (a field of) something that we
+        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
+        // structs containing such.
         op.try_as_mplace()
     };
-    let val = match normalized_op {
-        Ok(mplace) => return mplace_to_const(ecx, mplace),
-        Err(Immediate::Scalar(x)) =>
-            ConstValue::Scalar(x.not_undef().unwrap()),
-        Err(Immediate::ScalarPair(a, b)) => {
+    let val = match immediate {
+        Ok(mplace) => {
+            let ptr = mplace.ptr.to_ptr().unwrap();
+            let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
+            ConstValue::ByRef(ptr, alloc)
+        },
+        // see comment on `let try_as_immediate` above
+        Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x {
+            ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
+            ScalarMaybeUndef::Undef => {
+                // When coming out of "normal CTFE", we'll always have an `Indirect` operand as
+                // argument and we will not need this. The only way we can already have an
+                // `Immediate` is when we are called from `const_field`, and that `Immediate`
+                // comes from a constant so it can happen have `Undef`, because the indirect
+                // memory that was read had undefined bytes.
+                let mplace = op.to_mem_place();
+                let ptr = mplace.ptr.to_ptr().unwrap();
+                let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
+                ConstValue::ByRef(ptr, alloc)
+            },
+        },
+        Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => {
             let (data, start) = match a.not_undef().unwrap() {
                 Scalar::Ptr(ptr) => (
                     ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
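The `Undef` branch above is easiest to picture with a concrete example. A union whose layout ends up with scalar ABI, but which is initialized through a zero-sized variant, never writes its payload bytes; reading it back with `read_immediate` then yields `ScalarMaybeUndef::Undef`, and the constant has to stay `ByRef`. A hypothetical illustration (names invented; whether a given union actually gets scalar ABI is a layout decision, but this is the kind of value the comment describes):

// The payload byte of `NOTHING_WRITTEN` is never initialized, so at CTFE time
// it is undefined memory rather than a concrete scalar.
union ZeroSizedOrByte {
    unit: (),
    byte: u8,
}

const NOTHING_WRITTEN: ZeroSizedOrByte = ZeroSizedOrByte { unit: () };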
@@ -164,13 +171,12 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     ecx.run()?;
 
     // Intern the result
-    let mutability = if tcx.is_mutable_static(cid.instance.def_id()) ||
-                        !layout.ty.is_freeze(tcx, param_env, body.span) {
-        Mutability::Mutable
-    } else {
-        Mutability::Immutable
-    };
-    ecx.memory.intern_static(ret.ptr.to_ptr()?.alloc_id, mutability)?;
+    intern_const_alloc_recursive(
+        ecx,
+        cid.instance.def_id(),
+        ret,
+        param_env,
+    )?;
 
     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret)
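The deleted code decided mutability from exactly two inputs: `tcx.is_mutable_static` and `ty::is_freeze` (i.e. interior mutability); the new `intern_const_alloc_recursive` presumably takes over that decision per reachable allocation, per the commit title. A few illustrative statics (hypothetical names, only to show which side of the old test each falls on):

use std::sync::atomic::AtomicUsize;

// `usize` is `Freeze` and the static is not `mut`: immutable memory.
static PLAIN: usize = 5;

// `AtomicUsize` wraps an `UnsafeCell`, so the type is not `Freeze`:
// mutable memory even though the static itself is not declared `mut`.
static COUNTER: AtomicUsize = AtomicUsize::new(0);

// A `static mut` is mutable regardless of what type it holds.
static mut SCRATCH: usize = 0;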
@@ -297,7 +303,7 @@ impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
     }
 }
 
-type CompileTimeEvalContext<'mir, 'tcx> =
+crate type CompileTimeEvalContext<'mir, 'tcx> =
     InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
 
 impl interpret::MayLeak for ! {
@@ -526,13 +532,16 @@ fn validate_and_turn_into_const<'tcx>(
             mplace.into(),
             path,
             Some(&mut ref_tracking),
-            true, // const mode
         )?;
     }
     // Now that we validated, turn this into a proper constant.
     let def_id = cid.instance.def.def_id();
     if tcx.is_static(def_id) || cid.promoted.is_some() {
-        Ok(mplace_to_const(&ecx, mplace))
+        let ptr = mplace.ptr.to_ptr()?;
+        Ok(tcx.mk_const(ty::Const {
+            val: ConstValue::ByRef(ptr, ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id)),
+            ty: mplace.layout.ty,
+        }))
     } else {
         Ok(op_to_const(&ecx, mplace.into()))
     }
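This branch also answers the FIXME in the deleted `mplace_to_const`: by the time `validate_and_turn_into_const` runs, `intern_const_alloc_recursive` has presumably already moved the result allocation into the global `tcx.alloc_map`, so the code can fetch it back rather than clone and re-intern the interpreter's local allocation. The same two-line pattern appears in `op_to_const` above:

// Pattern repeated from the diff above (not new code): look the already-interned
// allocation back up by its `AllocId`.
let ptr = mplace.ptr.to_ptr()?;
let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);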

src/librustc_mir/interpret/eval_context.rs

Lines changed: 0 additions & 1 deletion
@@ -576,7 +576,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                     self.place_to_op(return_place)?,
                     vec![],
                     None,
-                    /*const_mode*/false,
                 )?;
             }
         } else {
