
Commit 80add95

Auto merge of #55293 - oli-obk:self_managing_allocations, r=<try>
Self managing allocations
2 parents: 3476ac0 + 07e8233

10 files changed: +871 −652 lines


src/librustc/mir/interpret/allocation.rs (+706)

Large diffs are not rendered by default.
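Note: the new +706-line module is not shown here, but the re-exports added to mod.rs below indicate it now owns the `Allocation`, `Relocations`, and `UndefMask` definitions moved out of mod.rs, plus two new items, `MemoryAccess` and `AllocationExtra`, through which allocations manage their own access logic. As a rough, hypothetical sketch of what such a per-allocation hook pattern can look like (illustrative names and signatures only, not the PR's actual definitions):

// Hypothetical sketch only: an allocation carrying machine-defined extra
// state that is consulted on every access. The real MemoryAccess and
// AllocationExtra definitions live in allocation.rs.
struct Allocation<Extra> {
    bytes: Vec<u8>,
    extra: Extra,
}

// A machine plugs in by implementing a hook that runs before each access.
trait AccessHooks {
    fn before_read(&self, offset: usize, len: usize);
}

impl<Extra: AccessHooks> Allocation<Extra> {
    fn read(&self, offset: usize, len: usize) -> &[u8] {
        self.extra.before_read(offset, len); // machine hook fires first
        &self.bytes[offset..offset + len]
    }
}

// Trivial machine: logs every read.
struct Logger;
impl AccessHooks for Logger {
    fn before_read(&self, offset: usize, len: usize) {
        eprintln!("read of {} bytes at offset {}", len, offset);
    }
}

fn main() {
    let alloc = Allocation { bytes: vec![1, 2, 3, 4], extra: Logger };
    assert_eq!(alloc.read(1, 2), &[2, 3]);
}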

src/librustc/mir/interpret/mod.rs (+8 −191)
@@ -17,27 +17,29 @@ macro_rules! err {
 
 mod error;
 mod value;
+mod allocation;
 
 pub use self::error::{
     EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error,
     FrameInfo, ConstEvalResult,
 };
 
-pub use self::value::{Scalar, ConstValue};
+pub use self::value::{Scalar, ConstValue, ScalarMaybeUndef};
+
+pub use self::allocation::{
+    Allocation, MemoryAccess, AllocationExtra,
+    Relocations, UndefMask,
+};
 
 use std::fmt;
 use mir;
 use hir::def_id::DefId;
 use ty::{self, TyCtxt, Instance};
-use ty::layout::{self, Align, HasDataLayout, Size};
+use ty::layout::{self, HasDataLayout, Size};
 use middle::region;
-use std::iter;
 use std::io;
-use std::ops::{Deref, DerefMut};
 use std::hash::Hash;
-use syntax::ast::Mutability;
 use rustc_serialize::{Encoder, Decodable, Encodable};
-use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
 use rustc_data_structures::tiny_list::TinyList;
@@ -523,91 +525,6 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
     }
 }
 
-#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
-pub struct Allocation<Tag=(),Extra=()> {
-    /// The actual bytes of the allocation.
-    /// Note that the bytes of a pointer represent the offset of the pointer
-    pub bytes: Vec<u8>,
-    /// Maps from byte addresses to extra data for each pointer.
-    /// Only the first byte of a pointer is inserted into the map; i.e.,
-    /// every entry in this map applies to `pointer_size` consecutive bytes starting
-    /// at the given offset.
-    pub relocations: Relocations<Tag>,
-    /// Denotes undefined memory. Reading from undefined memory is forbidden in miri
-    pub undef_mask: UndefMask,
-    /// The alignment of the allocation to detect unaligned reads.
-    pub align: Align,
-    /// Whether the allocation is mutable.
-    /// Also used by codegen to determine if a static should be put into mutable memory,
-    /// which happens for `static mut` and `static` with interior mutability.
-    pub mutability: Mutability,
-    /// Extra state for the machine.
-    pub extra: Extra,
-}
-
-impl<Tag, Extra: Default> Allocation<Tag, Extra> {
-    /// Creates a read-only allocation initialized by the given bytes
-    pub fn from_bytes(slice: &[u8], align: Align) -> Self {
-        let mut undef_mask = UndefMask::new(Size::ZERO);
-        undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
-        Self {
-            bytes: slice.to_owned(),
-            relocations: Relocations::new(),
-            undef_mask,
-            align,
-            mutability: Mutability::Immutable,
-            extra: Extra::default(),
-        }
-    }
-
-    pub fn from_byte_aligned_bytes(slice: &[u8]) -> Self {
-        Allocation::from_bytes(slice, Align::from_bytes(1, 1).unwrap())
-    }
-
-    pub fn undef(size: Size, align: Align) -> Self {
-        assert_eq!(size.bytes() as usize as u64, size.bytes());
-        Allocation {
-            bytes: vec![0; size.bytes() as usize],
-            relocations: Relocations::new(),
-            undef_mask: UndefMask::new(size),
-            align,
-            mutability: Mutability::Mutable,
-            extra: Extra::default(),
-        }
-    }
-}
-
-impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
-
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
-pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
-
-impl<Tag, Id> Relocations<Tag, Id> {
-    pub fn new() -> Self {
-        Relocations(SortedMap::new())
-    }
-
-    // The caller must guarantee that the given relocations are already sorted
-    // by address and contain no duplicates.
-    pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
-        Relocations(SortedMap::from_presorted_elements(r))
-    }
-}
-
-impl<Tag> Deref for Relocations<Tag> {
-    type Target = SortedMap<Size, (Tag, AllocId)>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
-
-impl<Tag> DerefMut for Relocations<Tag> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.0
-    }
-}
-
 ////////////////////////////////////////////////////////////////////////////////
 // Methods to access integers in the target endianness
 ////////////////////////////////////////////////////////////////////////////////
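Note: the `Relocations` removed above (also moved to allocation.rs) is a newtype over `SortedMap` that derefs to its inner map, so callers get the full map API while constructors like `from_presorted` can enforce invariants. A generic, self-contained sketch of that newtype-with-Deref pattern, using the standard library's `BTreeMap` in place of rustc's `SortedMap`:

use std::collections::BTreeMap;
use std::ops::{Deref, DerefMut};

// Newtype wrapper: construction is controlled, but reads and writes
// pass through to the inner map via Deref/DerefMut.
struct Relocations(BTreeMap<u64, u64>);

impl Relocations {
    fn new() -> Self {
        Relocations(BTreeMap::new())
    }
}

impl Deref for Relocations {
    type Target = BTreeMap<u64, u64>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Relocations {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

fn main() {
    let mut r = Relocations::new();
    r.insert(8, 42);                  // BTreeMap::insert, via DerefMut
    assert_eq!(r.get(&8), Some(&42)); // BTreeMap::get, via Deref
}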
@@ -650,103 +567,3 @@ pub fn truncate(value: u128, size: Size) -> u128 {
     // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
     (value << shift) >> shift
 }
-
-////////////////////////////////////////////////////////////////////////////////
-// Undefined byte tracking
-////////////////////////////////////////////////////////////////////////////////
-
-type Block = u64;
-const BLOCK_SIZE: u64 = 64;
-
-#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
-pub struct UndefMask {
-    blocks: Vec<Block>,
-    len: Size,
-}
-
-impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
-
-impl UndefMask {
-    pub fn new(size: Size) -> Self {
-        let mut m = UndefMask {
-            blocks: vec![],
-            len: Size::ZERO,
-        };
-        m.grow(size, false);
-        m
-    }
-
-    /// Check whether the range `start..end` (end-exclusive) is entirely defined.
-    ///
-    /// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
-    /// at which the first undefined access begins.
-    #[inline]
-    pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
-        if end > self.len {
-            return Err(self.len);
-        }
-
-        let idx = (start.bytes()..end.bytes())
-            .map(|i| Size::from_bytes(i))
-            .find(|&i| !self.get(i));
-
-        match idx {
-            Some(idx) => Err(idx),
-            None => Ok(())
-        }
-    }
-
-    pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
-        let len = self.len;
-        if end > len {
-            self.grow(end - len, new_state);
-        }
-        self.set_range_inbounds(start, end, new_state);
-    }
-
-    pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
-        for i in start.bytes()..end.bytes() {
-            self.set(Size::from_bytes(i), new_state);
-        }
-    }
-
-    #[inline]
-    pub fn get(&self, i: Size) -> bool {
-        let (block, bit) = bit_index(i);
-        (self.blocks[block] & 1 << bit) != 0
-    }
-
-    #[inline]
-    pub fn set(&mut self, i: Size, new_state: bool) {
-        let (block, bit) = bit_index(i);
-        if new_state {
-            self.blocks[block] |= 1 << bit;
-        } else {
-            self.blocks[block] &= !(1 << bit);
-        }
-    }
-
-    pub fn grow(&mut self, amount: Size, new_state: bool) {
-        let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
-        if amount.bytes() > unused_trailing_bits {
-            let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
-            assert_eq!(additional_blocks as usize as u64, additional_blocks);
-            self.blocks.extend(
-                iter::repeat(0).take(additional_blocks as usize),
-            );
-        }
-        let start = self.len;
-        self.len += amount;
-        self.set_range_inbounds(start, start + amount, new_state);
-    }
-}
-
-#[inline]
-fn bit_index(bits: Size) -> (usize, usize) {
-    let bits = bits.bytes();
-    let a = bits / BLOCK_SIZE;
-    let b = bits % BLOCK_SIZE;
-    assert_eq!(a as usize as u64, a);
-    assert_eq!(b as usize as u64, b);
-    (a as usize, b as usize)
-}
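Note: the `UndefMask` removed above (and moved into allocation.rs) tracks one definedness bit per byte, packed into 64-bit blocks; `bit_index` splits a byte offset into a block index and a bit position within that block. A simplified, self-contained rendition of that scheme, using plain `u64` offsets in place of rustc's `Size` type:

const BLOCK_SIZE: u64 = 64;

// One definedness bit per byte, packed into 64-bit blocks.
struct UndefMask {
    blocks: Vec<u64>,
    len: u64,
}

impl UndefMask {
    fn new(len: u64) -> Self {
        // Over-allocate by one block, mirroring grow()'s simple sizing.
        let nblocks = (len / BLOCK_SIZE + 1) as usize;
        UndefMask { blocks: vec![0; nblocks], len }
    }

    // Byte offset -> (block index, bit within block), as in bit_index.
    fn bit_index(i: u64) -> (usize, usize) {
        ((i / BLOCK_SIZE) as usize, (i % BLOCK_SIZE) as usize)
    }

    fn get(&self, i: u64) -> bool {
        let (block, bit) = Self::bit_index(i);
        (self.blocks[block] & (1 << bit)) != 0
    }

    fn set(&mut self, i: u64, defined: bool) {
        let (block, bit) = Self::bit_index(i);
        if defined {
            self.blocks[block] |= 1 << bit;
        } else {
            self.blocks[block] &= !(1 << bit);
        }
    }
}

fn main() {
    let mut mask = UndefMask::new(100);
    mask.set(70, true); // byte 70 becomes defined
    assert!(mask.get(70) && !mask.get(71));
}

In the real type, `set_range`, `is_range_defined`, and `grow` are all built on these two per-byte primitives.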

src/librustc/mir/interpret/value.rs (+119)
@@ -363,3 +363,122 @@ impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
         Scalar::Ptr(ptr)
     }
 }
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
+pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
+    Scalar(Scalar<Tag, Id>),
+    Undef,
+}
+
+impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
+    #[inline(always)]
+    fn from(s: Scalar<Tag>) -> Self {
+        ScalarMaybeUndef::Scalar(s)
+    }
+}
+
+impl<'tcx> ScalarMaybeUndef<()> {
+    #[inline]
+    pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
+        where Tag: Default
+    {
+        match self {
+            ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
+            ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
+        }
+    }
+}
+
+impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
+    #[inline]
+    pub fn erase_tag(self) -> ScalarMaybeUndef
+    {
+        match self {
+            ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
+            ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
+        }
+    }
+
+    #[inline]
+    pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
+        match self {
+            ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
+            ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
+        }
+    }
+
+    #[inline(always)]
+    pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
+        self.not_undef()?.to_ptr()
+    }
+
+    #[inline(always)]
+    pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
+        self.not_undef()?.to_bits(target_size)
+    }
+
+    #[inline(always)]
+    pub fn to_bool(self) -> EvalResult<'tcx, bool> {
+        self.not_undef()?.to_bool()
+    }
+
+    #[inline(always)]
+    pub fn to_char(self) -> EvalResult<'tcx, char> {
+        self.not_undef()?.to_char()
+    }
+
+    #[inline(always)]
+    pub fn to_f32(self) -> EvalResult<'tcx, f32> {
+        self.not_undef()?.to_f32()
+    }
+
+    #[inline(always)]
+    pub fn to_f64(self) -> EvalResult<'tcx, f64> {
+        self.not_undef()?.to_f64()
+    }
+
+    #[inline(always)]
+    pub fn to_u8(self) -> EvalResult<'tcx, u8> {
+        self.not_undef()?.to_u8()
+    }
+
+    #[inline(always)]
+    pub fn to_u32(self) -> EvalResult<'tcx, u32> {
+        self.not_undef()?.to_u32()
+    }
+
+    #[inline(always)]
+    pub fn to_u64(self) -> EvalResult<'tcx, u64> {
+        self.not_undef()?.to_u64()
+    }
+
+    #[inline(always)]
+    pub fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, u64> {
+        self.not_undef()?.to_usize(cx)
+    }
+
+    #[inline(always)]
+    pub fn to_i8(self) -> EvalResult<'tcx, i8> {
+        self.not_undef()?.to_i8()
+    }
+
+    #[inline(always)]
+    pub fn to_i32(self) -> EvalResult<'tcx, i32> {
+        self.not_undef()?.to_i32()
+    }
+
+    #[inline(always)]
+    pub fn to_i64(self) -> EvalResult<'tcx, i64> {
+        self.not_undef()?.to_i64()
+    }
+
+    #[inline(always)]
+    pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, i64> {
+        self.not_undef()?.to_isize(cx)
+    }
+}
+
+impl_stable_hash_for!(enum ::mir::interpret::ScalarMaybeUndef {
+    Scalar(v),
+    Undef
+});
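Note: `ScalarMaybeUndef` behaves like an `Option` for interpreter values: reads from possibly-undefined memory produce it, and every typed accessor (`to_bool`, `to_u32`, ...) funnels through `not_undef()`, so it is *using* an undef value that raises `ReadUndefBytes`, not merely reading it. A minimal self-contained sketch of that funneling pattern, with simplified stand-ins for `Scalar` and `EvalResult`:

// Simplified stand-ins for the interpreter's real Scalar/EvalResult types.
#[derive(Clone, Copy, Debug)]
enum Scalar { Bits(u128) }

#[derive(Debug, PartialEq)]
enum EvalError { ReadUndefBytes, InvalidBool }

#[derive(Clone, Copy, Debug)]
enum ScalarMaybeUndef {
    Scalar(Scalar),
    Undef,
}

impl ScalarMaybeUndef {
    // The single choke point: turning a possibly-undef value into a
    // definite Scalar fails if the bytes were never written.
    fn not_undef(self) -> Result<Scalar, EvalError> {
        match self {
            ScalarMaybeUndef::Scalar(s) => Ok(s),
            ScalarMaybeUndef::Undef => Err(EvalError::ReadUndefBytes),
        }
    }

    // Every typed accessor delegates through not_undef(), as in the PR.
    fn to_bool(self) -> Result<bool, EvalError> {
        match self.not_undef()? {
            Scalar::Bits(0) => Ok(false),
            Scalar::Bits(1) => Ok(true),
            _ => Err(EvalError::InvalidBool),
        }
    }
}

fn main() {
    assert_eq!(ScalarMaybeUndef::Scalar(Scalar::Bits(1)).to_bool(), Ok(true));
    assert_eq!(ScalarMaybeUndef::Undef.to_bool(), Err(EvalError::ReadUndefBytes));
}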
