Skip to content

Commit 621ca4e

Browse files
committed
Auto merge of #55260 - oli-obk:by_ref_all_the_consts, r=<try>
Get rid of the `Scalar` and `ScalarPair` variants of `ConstValue`, by always using `ByRef` and reading the scalars from the backing `Allocation` where desired. Oh god, this uncovered so many bugs and bugs-just-waiting-to-happen. I'm still not fully done, but I don't want to grow this PR any further. It is in a sane state except for the `const_field` function, which was hacky to begin with. fixes #54738 r? @RalfJung
2 parents a66dc8a + f3100df commit 621ca4e

33 files changed

+710
-457
lines changed

src/librustc/ich/impls_ty.rs

+13-15
Original file line numberDiff line numberDiff line change
@@ -375,13 +375,6 @@ for ::mir::interpret::ConstValue<'gcx> {
375375
def_id.hash_stable(hcx, hasher);
376376
substs.hash_stable(hcx, hasher);
377377
}
378-
Scalar(val) => {
379-
val.hash_stable(hcx, hasher);
380-
}
381-
ScalarPair(a, b) => {
382-
a.hash_stable(hcx, hasher);
383-
b.hash_stable(hcx, hasher);
384-
}
385378
ByRef(id, alloc, offset) => {
386379
id.hash_stable(hcx, hasher);
387380
alloc.hash_stable(hcx, hasher);
@@ -463,13 +456,18 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::Allocation {
463456
hcx: &mut StableHashingContext<'a>,
464457
hasher: &mut StableHasher<W>,
465458
) {
466-
self.bytes.hash_stable(hcx, hasher);
467-
for reloc in self.relocations.iter() {
459+
trace!("hashing allocation {:?}", self);
460+
let mir::interpret::Allocation {
461+
bytes, relocations, undef_mask, align, mutability,
462+
extra: (),
463+
} = self;
464+
bytes.hash_stable(hcx, hasher);
465+
for reloc in relocations.iter() {
468466
reloc.hash_stable(hcx, hasher);
469467
}
470-
self.undef_mask.hash_stable(hcx, hasher);
471-
self.align.hash_stable(hcx, hasher);
472-
self.mutability.hash_stable(hcx, hasher);
468+
undef_mask.hash_stable(hcx, hasher);
469+
align.hash_stable(hcx, hasher);
470+
mutability.hash_stable(hcx, hasher);
473471
}
474472
}
475473

@@ -512,7 +510,7 @@ for ::mir::interpret::EvalErrorKind<'gcx, O> {
512510
hasher: &mut StableHasher<W>) {
513511
use mir::interpret::EvalErrorKind::*;
514512

515-
mem::discriminant(&self).hash_stable(hcx, hasher);
513+
mem::discriminant(self).hash_stable(hcx, hasher);
516514

517515
match *self {
518516
FunctionArgCountMismatch |
@@ -577,11 +575,11 @@ for ::mir::interpret::EvalErrorKind<'gcx, O> {
577575
NoMirFor(ref s) => s.hash_stable(hcx, hasher),
578576
UnterminatedCString(ptr) => ptr.hash_stable(hcx, hasher),
579577
PointerOutOfBounds {
580-
ptr,
578+
offset,
581579
access,
582580
allocation_size,
583581
} => {
584-
ptr.hash_stable(hcx, hasher);
582+
offset.hash_stable(hcx, hasher);
585583
access.hash_stable(hcx, hasher);
586584
allocation_size.hash_stable(hcx, hasher)
587585
},

src/librustc/mir/interpret/error.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -197,7 +197,7 @@ pub enum EvalErrorKind<'tcx, O> {
197197
InvalidBool,
198198
InvalidDiscriminant(u128),
199199
PointerOutOfBounds {
200-
ptr: Pointer,
200+
offset: Size,
201201
access: bool,
202202
allocation_size: Size,
203203
},
@@ -440,10 +440,10 @@ impl<'tcx, O: fmt::Debug> fmt::Debug for EvalErrorKind<'tcx, O> {
440440
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
441441
use self::EvalErrorKind::*;
442442
match *self {
443-
PointerOutOfBounds { ptr, access, allocation_size } => {
444-
write!(f, "{} at offset {}, outside bounds of allocation {} which has size {}",
443+
PointerOutOfBounds { offset, access, allocation_size } => {
444+
write!(f, "{} at offset {}, outside bounds of allocation with size {}",
445445
if access { "memory access" } else { "pointer computed" },
446-
ptr.offset.bytes(), ptr.alloc_id, allocation_size.bytes())
446+
offset.bytes(), allocation_size.bytes())
447447
},
448448
MemoryLockViolation { ptr, len, frame, access, ref lock } => {
449449
write!(f, "{:?} access by frame {} at {:?}, size {}, is in conflict with lock {:?}",

src/librustc/mir/interpret/mod.rs

+197-10
Original file line numberDiff line numberDiff line change
@@ -28,13 +28,14 @@ pub use self::value::{Scalar, ConstValue};
2828
use std::fmt;
2929
use mir;
3030
use hir::def_id::DefId;
31-
use ty::{self, TyCtxt, Instance};
31+
use ty::{self, TyCtxt, Instance, Ty, ParamEnvAnd};
3232
use ty::layout::{self, Align, HasDataLayout, Size};
3333
use middle::region;
3434
use std::iter;
3535
use std::io;
3636
use std::ops::{Deref, DerefMut};
37-
use std::hash::Hash;
37+
use std::cmp;
38+
use std::hash::{Hash, Hasher};
3839
use syntax::ast::Mutability;
3940
use rustc_serialize::{Encoder, Decodable, Encodable};
4041
use rustc_data_structures::sorted_map::SortedMap;
@@ -217,9 +218,56 @@ impl<'tcx, Tag> Pointer<Tag> {
217218
}
218219

219220

220-
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
221+
#[derive(Copy, Clone, Debug)]
221222
pub struct AllocId(pub u64);
222223

224+
impl Eq for AllocId {}
225+
226+
impl Hash for AllocId {
227+
fn hash<H: Hasher>(&self, state: &mut H) {
228+
ty::tls::with(|tcx| {
229+
let alloc_type = tcx.alloc_map.lock().get(*self);
230+
match alloc_type {
231+
Some(alloc_type) => alloc_type.hash(state),
232+
None => self.0.hash(state),
233+
}
234+
})
235+
}
236+
}
237+
238+
impl PartialEq for AllocId {
239+
fn eq(&self, other: &Self) -> bool {
240+
ty::tls::with(|tcx| {
241+
let (s, o) = {
242+
let map = tcx.alloc_map.lock();
243+
(map.get(*self), map.get(*other))
244+
};
245+
s == o
246+
})
247+
}
248+
}
249+
250+
impl Ord for AllocId {
251+
fn cmp(&self, other: &Self) -> cmp::Ordering {
252+
match self.partial_cmp(other) {
253+
Some(ord) => ord,
254+
None => self.0.cmp(&other.0)
255+
}
256+
}
257+
}
258+
259+
impl PartialOrd for AllocId {
260+
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
261+
ty::tls::with(|tcx| {
262+
let (s, o) = {
263+
let map = tcx.alloc_map.lock();
264+
(map.get(*self)?, map.get(*other)?)
265+
};
266+
s.partial_cmp(&o)
267+
})
268+
}
269+
}
270+
223271
impl ::rustc_serialize::UseSpecializedEncodable for AllocId {}
224272
impl ::rustc_serialize::UseSpecializedDecodable for AllocId {}
225273

@@ -427,7 +475,7 @@ impl fmt::Display for AllocId {
427475
}
428476
}
429477

430-
#[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable)]
478+
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)]
431479
pub enum AllocType<'tcx, M> {
432480
/// The alloc id is used as a function pointer
433481
Function(Instance<'tcx>),
@@ -440,7 +488,7 @@ pub enum AllocType<'tcx, M> {
440488

441489
pub struct AllocMap<'tcx, M> {
442490
/// Lets you know what an AllocId refers to
443-
id_to_type: FxHashMap<AllocId, AllocType<'tcx, M>>,
491+
id_to_type: FxHashMap<u64, AllocType<'tcx, M>>,
444492

445493
/// Used to ensure that functions and statics only get one associated AllocId
446494
type_interner: FxHashMap<AllocType<'tcx, M>, AllocId>,
@@ -479,7 +527,7 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
479527
}
480528
let id = self.reserve();
481529
debug!("creating alloc_type {:?} with id {}", alloc_type, id);
482-
self.id_to_type.insert(id, alloc_type.clone());
530+
self.id_to_type.insert(id.0, alloc_type.clone());
483531
self.type_interner.insert(alloc_type, id);
484532
id
485533
}
@@ -492,7 +540,7 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
492540
}
493541

494542
pub fn get(&self, id: AllocId) -> Option<AllocType<'tcx, M>> {
495-
self.id_to_type.get(&id).cloned()
543+
self.id_to_type.get(&id.0).cloned()
496544
}
497545

498546
pub fn unwrap_memory(&self, id: AllocId) -> M {
@@ -513,13 +561,13 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
513561
}
514562

515563
pub fn set_id_memory(&mut self, id: AllocId, mem: M) {
516-
if let Some(old) = self.id_to_type.insert(id, AllocType::Memory(mem)) {
564+
if let Some(old) = self.id_to_type.insert(id.0, AllocType::Memory(mem)) {
517565
bug!("tried to set allocation id {}, but it was already existing as {:#?}", id, old);
518566
}
519567
}
520568

521569
pub fn set_id_same_memory(&mut self, id: AllocId, mem: M) {
522-
self.id_to_type.insert_same(id, AllocType::Memory(mem));
570+
self.id_to_type.insert_same(id.0, AllocType::Memory(mem));
523571
}
524572
}
525573

@@ -545,7 +593,7 @@ pub struct Allocation<Tag=(),Extra=()> {
545593
pub extra: Extra,
546594
}
547595

548-
impl<Tag, Extra: Default> Allocation<Tag, Extra> {
596+
impl<Tag: Copy, Extra: Default> Allocation<Tag, Extra> {
549597
/// Creates a read-only allocation initialized by the given bytes
550598
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
551599
let mut undef_mask = UndefMask::new(Size::ZERO);
@@ -575,6 +623,145 @@ impl<Tag, Extra: Default> Allocation<Tag, Extra> {
575623
extra: Extra::default(),
576624
}
577625
}
626+
627+
#[inline]
628+
pub fn size(&self) -> Size {
629+
Size::from_bytes(self.bytes.len() as u64)
630+
}
631+
632+
pub fn check_align(
633+
&self,
634+
offset: Size,
635+
required_align: Align,
636+
) -> EvalResult<'tcx> {
637+
if self.align.abi() < required_align.abi() {
638+
return err!(AlignmentCheckFailed {
639+
has: self.align,
640+
required: required_align,
641+
});
642+
}
643+
let offset = offset.bytes();
644+
if offset % required_align.abi() == 0 {
645+
Ok(())
646+
} else {
647+
let has = offset % required_align.abi();
648+
err!(AlignmentCheckFailed {
649+
has: Align::from_bytes(has, has).unwrap(),
650+
required: required_align,
651+
})
652+
}
653+
}
654+
655+
pub fn check_bounds(
656+
&self,
657+
offset: Size,
658+
size: Size,
659+
access: bool,
660+
) -> EvalResult<'tcx> {
661+
let end = offset + size;
662+
let allocation_size = self.size();
663+
if end > allocation_size {
664+
err!(PointerOutOfBounds { offset, access, allocation_size })
665+
} else {
666+
Ok(())
667+
}
668+
}
669+
670+
pub fn check_defined(
671+
&self,
672+
offset: Size,
673+
size: Size,
674+
) -> EvalResult<'tcx> {
675+
self.undef_mask.is_range_defined(
676+
offset,
677+
offset + size,
678+
).or_else(|idx| err!(ReadUndefBytes(idx)))
679+
}
680+
681+
pub fn check_relocations(
682+
&self,
683+
hdl: impl HasDataLayout,
684+
offset: Size,
685+
size: Size,
686+
) -> EvalResult<'tcx> {
687+
if self.relocations(hdl, offset, size).len() != 0 {
688+
err!(ReadPointerAsBytes)
689+
} else {
690+
Ok(())
691+
}
692+
}
693+
694+
fn relocations(
695+
&self,
696+
hdl: impl HasDataLayout,
697+
offset: Size,
698+
size: Size,
699+
) -> &[(Size, (Tag, AllocId))] {
700+
// We have to go back `pointer_size - 1` bytes, as that one would still overlap with
701+
// the beginning of this range.
702+
let start = offset.bytes().saturating_sub(hdl.data_layout().pointer_size.bytes() - 1);
703+
let end = offset + size; // this does overflow checking
704+
self.relocations.range(Size::from_bytes(start)..end)
705+
}
706+
707+
pub fn get_bytes(
708+
&self,
709+
hdl: impl HasDataLayout,
710+
offset: Size,
711+
size: Size,
712+
required_align: Align,
713+
) -> EvalResult<'tcx, &[u8]> {
714+
self.check_align(offset, required_align)?;
715+
self.check_bounds(offset, size, true)?;
716+
self.check_defined(offset, size)?;
717+
self.check_relocations(hdl, offset, size)?;
718+
Ok(self.bytes_ignoring_relocations_and_undef(offset, size))
719+
}
720+
721+
pub fn read_bits(
722+
&self,
723+
tcx: TyCtxt<'_, '_, 'tcx>,
724+
offset: Size,
725+
ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
726+
) -> EvalResult<'tcx, u128> {
727+
let ty = tcx.lift_to_global(&ty).unwrap();
728+
let layout = tcx.layout_of(ty).unwrap_or_else(|e| {
729+
panic!("could not compute layout for {:?}: {:?}", ty, e)
730+
});
731+
let bytes = self.get_bytes(tcx, offset, layout.size, layout.align)?;
732+
Ok(read_target_uint(tcx.data_layout.endian, bytes).unwrap())
733+
}
734+
735+
pub fn read_scalar(
736+
&self,
737+
hdl: impl HasDataLayout,
738+
offset: Size,
739+
size: Size,
740+
align: Align,
741+
) -> EvalResult<'tcx, Scalar<Tag>> {
742+
self.check_align(offset, align)?;
743+
self.check_bounds(offset, size, true)?;
744+
self.check_defined(offset, size)?;
745+
let bytes = self.bytes_ignoring_relocations_and_undef(offset, size);
746+
let int = read_target_uint(hdl.data_layout().endian, &bytes).unwrap();
747+
match self.relocations(hdl, offset, size) {
748+
&[(_, (tag, alloc_id))] if size == hdl.data_layout().pointer_size => {
749+
Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(int as u64), tag).into())
750+
},
751+
&[] => Ok(Scalar::Bits {
752+
bits: int,
753+
size: size.bytes() as u8,
754+
}),
755+
_ => err!(ReadPointerAsBytes),
756+
}
757+
}
758+
759+
fn bytes_ignoring_relocations_and_undef(&self, offset: Size, size: Size) -> &[u8] {
760+
let end = offset + size;
761+
let offset = offset.bytes() as usize;
762+
let end = end.bytes() as usize;
763+
&self.bytes[offset..end]
764+
}
578765
}
579766

580767
impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}

0 commit comments

Comments (0)