Commit 2108b6b

Move UndefMask and Relocations into allocation.rs
1 parent 6def30b commit 2108b6b

2 files changed: 138 additions & 138 deletions


src/librustc/mir/interpret/allocation.rs

Lines changed: 134 additions & 5 deletions
@@ -10,13 +10,13 @@

 //! The virtual memory representation of the MIR interpreter

-use super::{
-    UndefMask,
-    Relocations,
-};
-
 use ty::layout::{Size, Align};
 use syntax::ast::Mutability;
+use rustc_target::abi::HasDataLayout;
+use std::iter;
+use mir;
+use std::ops::{Deref, DerefMut};
+use rustc_data_structures::sorted_map::SortedMap;

 #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
 pub struct Allocation<Tag=(),Extra=()> {
@@ -103,3 +103,132 @@ impl<Tag, Extra: Default> Allocation<Tag, Extra> {
 }

 impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
+
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
+pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
+
+impl<Tag, Id> Relocations<Tag, Id> {
+    pub fn new() -> Self {
+        Relocations(SortedMap::new())
+    }
+
+    // The caller must guarantee that the given relocations are already sorted
+    // by address and contain no duplicates.
+    pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
+        Relocations(SortedMap::from_presorted_elements(r))
+    }
+}
+
+impl<Tag> Deref for Relocations<Tag> {
+    type Target = SortedMap<Size, (Tag, AllocId)>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<Tag> DerefMut for Relocations<Tag> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
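
Note: `Relocations` is a newtype around `SortedMap`, and the `Deref`/`DerefMut` impls above let callers use the map's API on it directly. A standalone sketch of the same pattern, with `std::collections::BTreeMap` standing in for rustc's `SortedMap` and `u64` for `Size`/`AllocId` (an illustration, not the rustc code):

    use std::collections::BTreeMap;
    use std::ops::{Deref, DerefMut};

    // Model: offset -> id of the allocation a stored pointer points to.
    struct Relocations(BTreeMap<u64, u64>);

    impl Deref for Relocations {
        type Target = BTreeMap<u64, u64>;
        fn deref(&self) -> &Self::Target { &self.0 }
    }

    impl DerefMut for Relocations {
        fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 }
    }

    fn main() {
        let mut relocations = Relocations(BTreeMap::new());
        // Deref coercion: the newtype is used like the map it wraps.
        relocations.insert(8, 42); // a pointer to allocation 42 lives at offset 8
        assert_eq!(relocations.get(&8), Some(&42));
    }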
+
+////////////////////////////////////////////////////////////////////////////////
+// Undefined byte tracking
+////////////////////////////////////////////////////////////////////////////////
+
+type Block = u64;
+const BLOCK_SIZE: u64 = 64;
+
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
+pub struct UndefMask {
+    blocks: Vec<Block>,
+    len: Size,
+}
+
+impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
+
+impl UndefMask {
+    pub fn new(size: Size) -> Self {
+        let mut m = UndefMask {
+            blocks: vec![],
+            len: Size::ZERO,
+        };
+        m.grow(size, false);
+        m
+    }
+
+    /// Check whether the range `start..end` (end-exclusive) is entirely defined.
+    ///
+    /// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
+    /// at which the first undefined access begins.
+    #[inline]
+    pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
+        if end > self.len {
+            return Err(self.len);
+        }
+
+        let idx = (start.bytes()..end.bytes())
+            .map(|i| Size::from_bytes(i))
+            .find(|&i| !self.get(i));
+
+        match idx {
+            Some(idx) => Err(idx),
+            None => Ok(())
+        }
+    }
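+
Note: for an out-of-bounds query, `is_range_defined` reports the mask length itself. A runnable standalone model of this contract, with a `bool` slice standing in for the bit-packed blocks (illustration only):

    // Model: `true` means the byte is defined.
    fn is_range_defined(mask: &[bool], start: u64, end: u64) -> Result<(), u64> {
        if end > mask.len() as u64 {
            return Err(mask.len() as u64); // out of bounds: report the length
        }
        match (start..end).find(|&i| !mask[i as usize]) {
            Some(i) => Err(i), // index of the first undefined byte
            None => Ok(()),
        }
    }

    fn main() {
        let mask = [true, false, true, true]; // byte 1 is undefined
        assert_eq!(is_range_defined(&mask, 0, 1), Ok(()));
        assert_eq!(is_range_defined(&mask, 0, 4), Err(1));
        assert_eq!(is_range_defined(&mask, 0, 9), Err(4)); // past the end
    }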
+    pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
+        let len = self.len;
+        if end > len {
+            self.grow(end - len, new_state);
+        }
+        self.set_range_inbounds(start, end, new_state);
+    }
+
+    pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
+        for i in start.bytes()..end.bytes() {
+            self.set(Size::from_bytes(i), new_state);
+        }
+    }
+
+    #[inline]
+    pub fn get(&self, i: Size) -> bool {
+        let (block, bit) = bit_index(i);
+        (self.blocks[block] & 1 << bit) != 0
+    }
+
+    #[inline]
+    pub fn set(&mut self, i: Size, new_state: bool) {
+        let (block, bit) = bit_index(i);
+        if new_state {
+            self.blocks[block] |= 1 << bit;
+        } else {
+            self.blocks[block] &= !(1 << bit);
+        }
+    }
+
+    pub fn grow(&mut self, amount: Size, new_state: bool) {
+        let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
+        if amount.bytes() > unused_trailing_bits {
+            let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
+            assert_eq!(additional_blocks as usize as u64, additional_blocks);
+            self.blocks.extend(
+                iter::repeat(0).take(additional_blocks as usize),
+            );
+        }
+        let start = self.len;
+        self.len += amount;
+        self.set_range_inbounds(start, start + amount, new_state);
+    }
+}
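+
Note: `grow` only appends blocks when the requested bytes exceed the spare bits of the last block, and `amount / BLOCK_SIZE + 1` rounds up (possibly leaving one spare block). A worked example with invented numbers:

    fn main() {
        const BLOCK_SIZE: u64 = 64;
        // Assume a mask of 5 bytes backed by one 64-bit block, grown by 60 bytes.
        let (num_blocks, len, amount) = (1u64, 5u64, 60u64);
        let unused_trailing_bits = num_blocks * BLOCK_SIZE - len;
        assert_eq!(unused_trailing_bits, 59);
        // 60 > 59, so zeroed blocks are appended before bytes 5..65 are set:
        let additional_blocks = amount / BLOCK_SIZE + 1;
        assert_eq!(additional_blocks, 1);
    }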
+#[inline]
+fn bit_index(bits: Size) -> (usize, usize) {
+    let bits = bits.bytes();
+    let a = bits / BLOCK_SIZE;
+    let b = bits % BLOCK_SIZE;
+    assert_eq!(a as usize as u64, a);
+    assert_eq!(b as usize as u64, b);
+    (a as usize, b as usize)
+}
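
Note: `bit_index` maps byte `i` to bit `i % 64` of block `i / 64`. A runnable miniature of the same addressing (names mirror the diff, but this is a sketch, not the rustc code):

    const BLOCK_SIZE: u64 = 64;

    fn bit_index(byte: u64) -> (usize, usize) {
        ((byte / BLOCK_SIZE) as usize, (byte % BLOCK_SIZE) as usize)
    }

    fn main() {
        let mut blocks = vec![0u64; 2];   // enough for 128 bytes
        let (block, bit) = bit_index(70); // byte 70 -> block 1, bit 6
        blocks[block] |= 1 << bit;        // mark byte 70 as defined
        assert_eq!((block, bit), (1, 6));
        assert_ne!(blocks[1] & (1 << 6), 0);
    }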

src/librustc/mir/interpret/mod.rs

Lines changed: 4 additions & 133 deletions
@@ -26,20 +26,20 @@ pub use self::error::{

 pub use self::value::{Scalar, ConstValue};

-pub use self::allocation::{Allocation, MemoryAccess};
+pub use self::allocation::{
+    Allocation, AllocationExtra,
+    Relocations, UndefMask,
+};

 use std::fmt;
 use mir;
 use hir::def_id::DefId;
 use ty::{self, TyCtxt, Instance};
 use ty::layout::{self, HasDataLayout, Size};
 use middle::region;
-use std::iter;
 use std::io;
-use std::ops::{Deref, DerefMut};
 use std::hash::Hash;
 use rustc_serialize::{Encoder, Decodable, Encodable};
-use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
 use rustc_data_structures::tiny_list::TinyList;
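
Note: because of the re-export added above, the move is invisible to users of these types; code elsewhere in librustc can keep writing, for example:

    use mir::interpret::{Relocations, UndefMask};

instead of naming the new allocation module directly (the exact import form is a sketch; paths follow this crate's existing `use mir;` style).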
@@ -530,35 +530,6 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
     }
 }

-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
-pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
-
-impl<Tag, Id> Relocations<Tag, Id> {
-    pub fn new() -> Self {
-        Relocations(SortedMap::new())
-    }
-
-    // The caller must guarantee that the given relocations are already sorted
-    // by address and contain no duplicates.
-    pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
-        Relocations(SortedMap::from_presorted_elements(r))
-    }
-}
-
-impl<Tag> Deref for Relocations<Tag> {
-    type Target = SortedMap<Size, (Tag, AllocId)>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
-
-impl<Tag> DerefMut for Relocations<Tag> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.0
-    }
-}
-
 ////////////////////////////////////////////////////////////////////////////////
 // Methods to access integers in the target endianness
 ////////////////////////////////////////////////////////////////////////////////
@@ -602,106 +573,6 @@ pub fn truncate(value: u128, size: Size) -> u128 {
     (value << shift) >> shift
 }

-////////////////////////////////////////////////////////////////////////////////
-// Undefined byte tracking
-////////////////////////////////////////////////////////////////////////////////
-
-type Block = u64;
-const BLOCK_SIZE: u64 = 64;
-
-#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
-pub struct UndefMask {
-    blocks: Vec<Block>,
-    len: Size,
-}
-
-impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
-
-impl UndefMask {
-    pub fn new(size: Size) -> Self {
-        let mut m = UndefMask {
-            blocks: vec![],
-            len: Size::ZERO,
-        };
-        m.grow(size, false);
-        m
-    }
-
-    /// Check whether the range `start..end` (end-exclusive) is entirely defined.
-    ///
-    /// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
-    /// at which the first undefined access begins.
-    #[inline]
-    pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
-        if end > self.len {
-            return Err(self.len);
-        }
-
-        let idx = (start.bytes()..end.bytes())
-            .map(|i| Size::from_bytes(i))
-            .find(|&i| !self.get(i));
-
-        match idx {
-            Some(idx) => Err(idx),
-            None => Ok(())
-        }
-    }
-
-    pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
-        let len = self.len;
-        if end > len {
-            self.grow(end - len, new_state);
-        }
-        self.set_range_inbounds(start, end, new_state);
-    }
-
-    pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
-        for i in start.bytes()..end.bytes() {
-            self.set(Size::from_bytes(i), new_state);
-        }
-    }
-
-    #[inline]
-    pub fn get(&self, i: Size) -> bool {
-        let (block, bit) = bit_index(i);
-        (self.blocks[block] & 1 << bit) != 0
-    }
-
-    #[inline]
-    pub fn set(&mut self, i: Size, new_state: bool) {
-        let (block, bit) = bit_index(i);
-        if new_state {
-            self.blocks[block] |= 1 << bit;
-        } else {
-            self.blocks[block] &= !(1 << bit);
-        }
-    }
-
-    pub fn grow(&mut self, amount: Size, new_state: bool) {
-        let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
-        if amount.bytes() > unused_trailing_bits {
-            let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
-            assert_eq!(additional_blocks as usize as u64, additional_blocks);
-            self.blocks.extend(
-                iter::repeat(0).take(additional_blocks as usize),
-            );
-        }
-        let start = self.len;
-        self.len += amount;
-        self.set_range_inbounds(start, start + amount, new_state);
-    }
-}
-
-#[inline]
-fn bit_index(bits: Size) -> (usize, usize) {
-    let bits = bits.bytes();
-    let a = bits / BLOCK_SIZE;
-    let b = bits % BLOCK_SIZE;
-    assert_eq!(a as usize as u64, a);
-    assert_eq!(b as usize as u64, b);
-    (a as usize, b as usize)
-}
-
 #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
 pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
     Scalar(Scalar<Tag, Id>),