
Commit 1ddbdc6

use checked casts and arithmetic in Miri engine
1 parent 5be304b commit 1ddbdc6

16 files changed: +186 -153 lines changed
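
The files shown below all follow the same two substitutions: lossy `as` casts become explicit `try_from` conversions that panic instead of silently truncating, and bare `+` / `*` / `-` on `Size` offsets are routed through the operator trait methods so that overflow is checked. A minimal standalone sketch of the cast half of the pattern (not taken from the rustc sources):

    use std::convert::TryFrom;

    fn main() {
        let size_in_bytes: u64 = 4096;

        // Before: on a 32-bit host, `as` silently truncates anything above u32::MAX.
        let lossy = size_in_bytes as usize;

        // After: the failure case is explicit, and `unwrap` turns it into a loud panic
        // rather than a silent wrap-around.
        let checked = usize::try_from(size_in_bytes).unwrap();

        assert_eq!(lossy, checked);
    }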

src/librustc/mir/interpret/allocation.rs

Lines changed: 38 additions & 40 deletions
@@ -1,18 +1,20 @@
 //! The virtual memory representation of the MIR interpreter.
 
+use std::borrow::Cow;
+use std::convert::TryFrom;
+use std::iter;
+use std::ops::{Add, Deref, DerefMut, Mul, Range, Sub};
+
+use rustc_ast::ast::Mutability;
+use rustc_data_structures::sorted_map::SortedMap;
+use rustc_target::abi::HasDataLayout;
+
 use super::{
     read_target_uint, write_target_uint, AllocId, InterpResult, Pointer, Scalar, ScalarMaybeUndef,
 };
 
 use crate::ty::layout::{Align, Size};
 
-use rustc_ast::ast::Mutability;
-use rustc_data_structures::sorted_map::SortedMap;
-use rustc_target::abi::HasDataLayout;
-use std::borrow::Cow;
-use std::iter;
-use std::ops::{Deref, DerefMut, Range};
-
 // NOTE: When adding new fields, make sure to adjust the `Snapshot` impl in
 // `src/librustc_mir/interpret/snapshot.rs`.
 #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
@@ -90,7 +92,7 @@ impl<Tag> Allocation<Tag> {
     /// Creates a read-only allocation initialized by the given bytes
     pub fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, align: Align) -> Self {
         let bytes = slice.into().into_owned();
-        let size = Size::from_bytes(bytes.len() as u64);
+        let size = Size::from_bytes(u64::try_from(bytes.len()).unwrap());
         Self {
             bytes,
             relocations: Relocations::new(),
@@ -107,9 +109,8 @@ impl<Tag> Allocation<Tag> {
     }
 
     pub fn undef(size: Size, align: Align) -> Self {
-        assert_eq!(size.bytes() as usize as u64, size.bytes());
         Allocation {
-            bytes: vec![0; size.bytes() as usize],
+            bytes: vec![0; usize::try_from(size.bytes()).unwrap()],
             relocations: Relocations::new(),
             undef_mask: UndefMask::new(size, false),
             size,
@@ -152,7 +153,7 @@ impl Allocation<(), ()> {
 /// Raw accessors. Provide access to otherwise private bytes.
 impl<Tag, Extra> Allocation<Tag, Extra> {
     pub fn len(&self) -> usize {
-        self.size.bytes() as usize
+        usize::try_from(self.size.bytes()).unwrap()
     }
 
     /// Looks at a slice which may describe undefined bytes or describe a relocation. This differs
@@ -182,21 +183,16 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
     /// Returns the range of this allocation that was meant.
     #[inline]
     fn check_bounds(&self, offset: Size, size: Size) -> Range<usize> {
-        let end = offset + size; // This does overflow checking.
-        assert_eq!(
-            end.bytes() as usize as u64,
-            end.bytes(),
-            "cannot handle this access on this host architecture"
-        );
-        let end = end.bytes() as usize;
+        let end = Size::add(offset, size); // This does overflow checking.
+        let end = usize::try_from(end.bytes()).expect("access too big for this host architecture");
        assert!(
            end <= self.len(),
            "Out-of-bounds access at offset {}, size {} in allocation of size {}",
            offset.bytes(),
            size.bytes(),
            self.len()
        );
-        (offset.bytes() as usize)..end
+        usize::try_from(offset.bytes()).unwrap()..end
     }
 
     /// The last argument controls whether we error out when there are undefined
@@ -294,11 +290,11 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
     ) -> InterpResult<'tcx, &[u8]> {
-        assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
-        let offset = ptr.offset.bytes() as usize;
+        let offset = usize::try_from(ptr.offset.bytes()).unwrap();
         Ok(match self.bytes[offset..].iter().position(|&c| c == 0) {
             Some(size) => {
-                let size_with_null = Size::from_bytes((size + 1) as u64);
+                let size_with_null =
+                    Size::from_bytes(u64::try_from(size.checked_add(1).unwrap()).unwrap());
                 // Go through `get_bytes` for checks and AllocationExtra hooks.
                 // We read the null, so we include it in the request, but we want it removed
                 // from the result, so we do subslicing.
@@ -343,7 +339,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         let (lower, upper) = src.size_hint();
         let len = upper.expect("can only write bounded iterators");
         assert_eq!(lower, len, "can only write iterators with a precise length");
-        let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(len as u64))?;
+        let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(u64::try_from(len).unwrap()))?;
         // `zip` would stop when the first iterator ends; we want to definitely
         // cover all of `bytes`.
         for dest in bytes {
@@ -386,7 +382,11 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         } else {
             match self.relocations.get(&ptr.offset) {
                 Some(&(tag, alloc_id)) => {
-                    let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits as u64), tag);
+                    let ptr = Pointer::new_with_tag(
+                        alloc_id,
+                        Size::from_bytes(u64::try_from(bits).unwrap()),
+                        tag,
+                    );
                     return Ok(ScalarMaybeUndef::Scalar(ptr.into()));
                 }
                 None => {}
@@ -433,7 +433,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         };
 
         let bytes = match val.to_bits_or_ptr(type_size, cx) {
-            Err(val) => val.offset.bytes() as u128,
+            Err(val) => u128::from(val.offset.bytes()),
             Ok(data) => data,
         };
 
@@ -479,7 +479,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
         // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
         // the beginning of this range.
         let start = ptr.offset.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
-        let end = ptr.offset + size; // This does overflow checking.
+        let end = Size::add(ptr.offset, size); // This does overflow checking.
         self.relocations.range(Size::from_bytes(start)..end)
     }
 
@@ -524,7 +524,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
             )
         };
         let start = ptr.offset;
-        let end = start + size;
+        let end = Size::add(start, size);
 
         // Mark parts of the outermost relocations as undefined if they partially fall outside the
         // given range.
@@ -563,15 +563,15 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
     #[inline]
     fn check_defined(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
         self.undef_mask
-            .is_range_defined(ptr.offset, ptr.offset + size)
+            .is_range_defined(ptr.offset, Size::add(ptr.offset, size))
             .or_else(|idx| throw_ub!(InvalidUndefBytes(Some(Pointer::new(ptr.alloc_id, idx)))))
     }
 
     pub fn mark_definedness(&mut self, ptr: Pointer<Tag>, size: Size, new_state: bool) {
         if size.bytes() == 0 {
             return;
         }
-        self.undef_mask.set_range(ptr.offset, ptr.offset + size, new_state);
+        self.undef_mask.set_range(ptr.offset, Size::add(ptr.offset, size), new_state);
     }
 }
 
@@ -616,7 +616,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
 
         for i in 1..size.bytes() {
             // FIXME: optimize to bitshift the current undef block's bits and read the top bit.
-            if self.undef_mask.get(src.offset + Size::from_bytes(i)) == cur {
+            if self.undef_mask.get(Size::add(src.offset, Size::from_bytes(i))) == cur {
                 cur_len += 1;
             } else {
                 ranges.push(cur_len);
@@ -643,7 +643,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         if defined.ranges.len() <= 1 {
             self.undef_mask.set_range_inbounds(
                 dest.offset,
-                dest.offset + size * repeat,
+                Size::add(dest.offset, Size::mul(size, repeat)),
                 defined.initial,
             );
             return;
@@ -721,10 +721,10 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
         for i in 0..length {
             new_relocations.extend(relocations.iter().map(|&(offset, reloc)| {
                 // compute offset for current repetition
-                let dest_offset = dest.offset + (i * size);
+                let dest_offset = Size::add(dest.offset, Size::mul(size, i));
                 (
                     // shift offsets from source allocation to destination allocation
-                    offset + dest_offset - src.offset,
+                    Size::sub(Size::add(offset, dest_offset), src.offset),
                     reloc,
                 )
             }));
@@ -861,18 +861,18 @@ impl UndefMask {
         if amount.bytes() == 0 {
             return;
         }
-        let unused_trailing_bits = self.blocks.len() as u64 * Self::BLOCK_SIZE - self.len.bytes();
+        let unused_trailing_bits =
+            u64::try_from(self.blocks.len()).unwrap() * Self::BLOCK_SIZE - self.len.bytes();
         if amount.bytes() > unused_trailing_bits {
             let additional_blocks = amount.bytes() / Self::BLOCK_SIZE + 1;
-            assert_eq!(additional_blocks as usize as u64, additional_blocks);
             self.blocks.extend(
                 // FIXME(oli-obk): optimize this by repeating `new_state as Block`.
-                iter::repeat(0).take(additional_blocks as usize),
+                iter::repeat(0).take(usize::try_from(additional_blocks).unwrap()),
             );
         }
         let start = self.len;
         self.len += amount;
-        self.set_range_inbounds(start, start + amount, new_state);
+        self.set_range_inbounds(start, Size::add(start, amount), new_state);
     }
 }
 
@@ -881,7 +881,5 @@ fn bit_index(bits: Size) -> (usize, usize) {
     let bits = bits.bytes();
     let a = bits / UndefMask::BLOCK_SIZE;
     let b = bits % UndefMask::BLOCK_SIZE;
-    assert_eq!(a as usize as u64, a);
-    assert_eq!(b as usize as u64, b);
-    (a as usize, b as usize)
+    (usize::try_from(a).unwrap(), usize::try_from(b).unwrap())
 }
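
Note how the arithmetic side of the change works in this file: the new `use std::ops::{Add, Deref, DerefMut, Mul, Range, Sub}` import is what lets `offset + size` be respelled as `Size::add(offset, size)`, an explicit call to the `Add` impl, which (per the inline comments) performs overflow checking. A rough sketch of that shape for a stand-in `Size` newtype, an illustration of the assumed behaviour rather than the actual rustc definition:

    use std::ops::Add;

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct Size {
        raw: u64,
    }

    impl Add for Size {
        type Output = Size;

        fn add(self, other: Size) -> Size {
            // Overflow panics instead of silently wrapping.
            Size { raw: self.raw.checked_add(other.raw).expect("Size overflow") }
        }
    }

    fn main() {
        let offset = Size { raw: 8 };
        let len = Size { raw: 24 };
        // With `Add` in scope, `Size::add(offset, len)` is the explicit spelling of `offset + len`.
        assert_eq!(Size::add(offset, len), Size { raw: 32 });
    }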

src/librustc/mir/interpret/mod.rs

Lines changed: 23 additions & 20 deletions
@@ -95,6 +95,27 @@ mod pointer;
 mod queries;
 mod value;
 
+use std::convert::TryFrom;
+use std::fmt;
+use std::io;
+use std::num::NonZeroU32;
+use std::sync::atomic::{AtomicU32, Ordering};
+
+use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
+use rustc_ast::ast::LitKind;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::{HashMapExt, Lock};
+use rustc_data_structures::tiny_list::TinyList;
+use rustc_hir::def_id::DefId;
+use rustc_macros::HashStable;
+use rustc_serialize::{Decodable, Encodable, Encoder};
+
+use crate::mir;
+use crate::ty::codec::TyDecoder;
+use crate::ty::layout::{self, Size};
+use crate::ty::subst::GenericArgKind;
+use crate::ty::{self, Instance, Ty, TyCtxt};
+
 pub use self::error::{
     struct_error, ConstEvalErr, ConstEvalRawResult, ConstEvalResult, ErrorHandled, FrameInfo,
     InterpError, InterpErrorInfo, InterpResult, InvalidProgramInfo, MachineStopType,
@@ -107,24 +128,6 @@ pub use self::allocation::{Allocation, AllocationExtra, Relocations, UndefMask};
 
 pub use self::pointer::{CheckInAllocMsg, Pointer, PointerArithmetic};
 
-use crate::mir;
-use crate::ty::codec::TyDecoder;
-use crate::ty::layout::{self, Size};
-use crate::ty::subst::GenericArgKind;
-use crate::ty::{self, Instance, Ty, TyCtxt};
-use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
-use rustc_ast::ast::LitKind;
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::{HashMapExt, Lock};
-use rustc_data_structures::tiny_list::TinyList;
-use rustc_hir::def_id::DefId;
-use rustc_macros::HashStable;
-use rustc_serialize::{Decodable, Encodable, Encoder};
-use std::fmt;
-use std::io;
-use std::num::NonZeroU32;
-use std::sync::atomic::{AtomicU32, Ordering};
-
 /// Uniquely identifies one of the following:
 /// - A constant
 /// - A static
@@ -264,8 +267,8 @@ impl<'s> AllocDecodingSession<'s> {
         D: TyDecoder<'tcx>,
     {
         // Read the index of the allocation.
-        let idx = decoder.read_u32()? as usize;
-        let pos = self.state.data_offsets[idx] as usize;
+        let idx = usize::try_from(decoder.read_u32()?).unwrap();
+        let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();
 
         // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
         // `AllocId`.
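
The decoder change keeps the same cast policy: the allocation index arrives as a `u32`, and `usize::try_from` replaces the `as` cast even though the conversion can only fail on a 16-bit target. A tiny standalone sketch (the index value is made up for illustration):

    use std::convert::TryFrom;

    fn main() {
        // Stand-in for the value produced by `decoder.read_u32()?`.
        let raw_index: u32 = 17;

        // `TryFrom<u32> for usize` is fallible in general, but only on 16-bit targets;
        // the `unwrap` documents that assumption instead of hiding it behind `as`.
        let idx: usize = usize::try_from(raw_index).unwrap();
        assert_eq!(idx, 17);
    }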

src/librustc/mir/interpret/pointer.rs

Lines changed: 2 additions & 2 deletions
@@ -62,9 +62,9 @@ pub trait PointerArithmetic: layout::HasDataLayout {
     /// This should be called by all the other methods before returning!
     #[inline]
     fn truncate_to_ptr(&self, (val, over): (u64, bool)) -> (u64, bool) {
-        let val = val as u128;
+        let val = u128::from(val);
         let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
-        ((val % max_ptr_plus_1) as u64, over || val >= max_ptr_plus_1)
+        (u64::try_from(val % max_ptr_plus_1).unwrap(), over || val >= max_ptr_plus_1)
     }
 
     #[inline]
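
`truncate_to_ptr` reduces a 64-bit host value modulo the target's pointer range and records whether it fit; the widening now uses `u128::from` and the narrowing back uses a checked `try_from`. A standalone rework of that arithmetic for an assumed 32-bit target (the hard-coded 32 is for illustration only):

    use std::convert::TryFrom;

    fn truncate_to_ptr_32(val: u64, over: bool) -> (u64, bool) {
        let val = u128::from(val);
        // For a 32-bit target, `self.pointer_size().bits()` would be 32.
        let max_ptr_plus_1: u128 = 1u128 << 32;
        (u64::try_from(val % max_ptr_plus_1).unwrap(), over || val >= max_ptr_plus_1)
    }

    fn main() {
        // 2^32 + 5 does not fit in a 32-bit pointer: the value wraps and the overflow flag is set.
        assert_eq!(truncate_to_ptr_32((1u64 << 32) + 5, false), (5, true));
        // A small value passes through unchanged.
        assert_eq!(truncate_to_ptr_32(42, false), (42, false));
    }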
