Skip to content

Commit 82da68c

Browse files
committed
implement all ptr comparisons through integer casts
1 parent bb6a91d commit 82da68c

File tree

1 file changed

+23
-200
lines changed

1 file changed

+23
-200
lines changed

src/operator.rs

+23-200
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use rustc::ty::{Ty, layout::{Size, LayoutOf}};
1+
use rustc::ty::{Ty, layout::LayoutOf};
22
use rustc::mir;
33

44
use crate::*;
@@ -16,14 +16,6 @@ pub trait EvalContextExt<'tcx> {
1616
right: ImmTy<'tcx, Tag>,
1717
) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;
1818

19-
fn ptr_int_arithmetic(
20-
&self,
21-
bin_op: mir::BinOp,
22-
left: Pointer<Tag>,
23-
right: u128,
24-
signed: bool,
25-
) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;
26-
2719
fn ptr_eq(
2820
&self,
2921
left: Scalar<Tag>,
@@ -56,9 +48,9 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
5648

5749
trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
5850

59-
// Operations that support fat pointers
60-
match bin_op {
51+
Ok(match bin_op {
6152
Eq | Ne => {
53+
// This supports fat pointers.
6254
let eq = match (*left, *right) {
6355
(Immediate::Scalar(left), Immediate::Scalar(right)) =>
6456
self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
@@ -67,114 +59,38 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
6759
self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
6860
_ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
6961
};
70-
return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
62+
(Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false)
7163
}
72-
_ => {},
73-
}
7464

75-
// Now we expect no more fat pointers.
76-
let left_layout = left.layout;
77-
let left = left.to_scalar()?;
78-
let right_layout = right.layout;
79-
let right = right.to_scalar()?;
65+
Lt | Le | Gt | Ge => {
66+
// Just compare the integers.
67+
// TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
68+
let left = self.force_bits(left.to_scalar()?, left.layout.size)?;
69+
let right = self.force_bits(right.to_scalar()?, right.layout.size)?;
70+
let res = match bin_op {
71+
Lt => left < right,
72+
Le => left <= right,
73+
Gt => left > right,
74+
Ge => left >= right,
75+
_ => bug!("We already established it has to be one of these operators."),
76+
};
77+
(Scalar::from_bool(res), false)
78+
}
8079

81-
Ok(match bin_op {
8280
Offset => {
83-
let pointee_ty = left_layout.ty
81+
let pointee_ty = left.layout.ty
8482
.builtin_deref(true)
8583
.expect("Offset called on non-ptr type")
8684
.ty;
8785
let ptr = self.pointer_offset_inbounds(
88-
left,
86+
left.to_scalar()?,
8987
pointee_ty,
90-
right.to_isize(self)?,
88+
right.to_scalar()?.to_isize(self)?,
9189
)?;
9290
(ptr, false)
9391
}
94-
// These need both operands to be pointers, and fail if they are not in the same location
95-
Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
96-
let left = left.assert_ptr();
97-
let right = right.assert_ptr();
98-
if left.alloc_id == right.alloc_id {
99-
let res = match bin_op {
100-
Lt => left.offset < right.offset,
101-
Le => left.offset <= right.offset,
102-
Gt => left.offset > right.offset,
103-
Ge => left.offset >= right.offset,
104-
Sub => {
105-
// subtract the offsets
106-
let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
107-
let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
108-
let layout = self.layout_of(self.tcx.types.usize)?;
109-
return self.binary_op(
110-
Sub,
111-
ImmTy::from_scalar(left_offset, layout),
112-
ImmTy::from_scalar(right_offset, layout),
113-
)
114-
}
115-
_ => bug!("We already established it has to be one of these operators."),
116-
};
117-
(Scalar::from_bool(res), false)
118-
} else {
119-
// Both are pointers, but from different allocations.
120-
throw_unsup!(InvalidPointerMath)
121-
}
122-
}
123-
Lt | Le | Gt | Ge if left.is_bits() && right.is_bits() => {
124-
let left = left.assert_bits(self.memory().pointer_size());
125-
let right = right.assert_bits(self.memory().pointer_size());
126-
let res = match bin_op {
127-
Lt => left < right,
128-
Le => left <= right,
129-
Gt => left > right,
130-
Ge => left >= right,
131-
_ => bug!("We already established it has to be one of these operators."),
132-
};
133-
Ok((Scalar::from_bool(res), false))
134-
}
135-
Gt | Ge if left.is_ptr() && right.is_bits() => {
136-
// "ptr >[=] integer" can be tested if the integer is small enough.
137-
let left = left.assert_ptr();
138-
let right = right.assert_bits(self.memory().pointer_size());
139-
let (_alloc_size, alloc_align) = self.memory()
140-
.get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
141-
.expect("alloc info with MaybeDead cannot fail");
142-
let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
143-
let result = match bin_op {
144-
Gt => min_ptr_val > right,
145-
Ge => min_ptr_val >= right,
146-
_ => bug!(),
147-
};
148-
if result {
149-
// Definitely true!
150-
(Scalar::from_bool(true), false)
151-
} else {
152-
// Sorry, can't tell.
153-
throw_unsup!(InvalidPointerMath)
154-
}
155-
}
156-
// These work if the left operand is a pointer, and the right an integer
157-
Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
158-
// Cast to i128 is fine as we checked the kind to be ptr-sized
159-
self.ptr_int_arithmetic(
160-
bin_op,
161-
left.assert_ptr(),
162-
right.assert_bits(self.memory().pointer_size()),
163-
right_layout.abi.is_signed(),
164-
)?
165-
}
166-
// Commutative operators also work if the integer is on the left
167-
Add | BitAnd if left.is_bits() && right.is_ptr() => {
168-
// This is a commutative operation, just swap the operands
169-
self.ptr_int_arithmetic(
170-
bin_op,
171-
right.assert_ptr(),
172-
left.assert_bits(self.memory().pointer_size()),
173-
left_layout.abi.is_signed(),
174-
)?
175-
}
176-
// Nothing else works
177-
_ => throw_unsup!(InvalidPointerMath),
92+
93+
_ => bug!("Invalid operator on pointers: {:?}", bin_op)
17894
})
17995
}
18096

@@ -191,99 +107,6 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
191107
Ok(left == right)
192108
}
193109

194-
fn ptr_int_arithmetic(
195-
&self,
196-
bin_op: mir::BinOp,
197-
left: Pointer<Tag>,
198-
right: u128,
199-
signed: bool,
200-
) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
201-
use rustc::mir::BinOp::*;
202-
203-
fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
204-
(Scalar::Ptr(res), over)
205-
}
206-
207-
Ok(match bin_op {
208-
Sub =>
209-
// The only way this can overflow is by underflowing, so signdeness of the right
210-
// operands does not matter.
211-
map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
212-
Add if signed =>
213-
map_to_primval(left.overflowing_signed_offset(right as i128, self)),
214-
Add if !signed =>
215-
map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),
216-
217-
BitAnd if !signed => {
218-
let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
219-
.expect("alloc info with MaybeDead cannot fail")
220-
.1.bytes();
221-
let base_mask = {
222-
// FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
223-
let shift = 128 - self.memory().pointer_size().bits();
224-
let value = !(ptr_base_align as u128 - 1);
225-
// Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
226-
(value << shift) >> shift
227-
};
228-
let ptr_size = self.memory().pointer_size();
229-
trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
230-
ptr_base_align, right, base_mask);
231-
if right & base_mask == base_mask {
232-
// Case 1: the base address bits are all preserved, i.e., right is all-1 there.
233-
let offset = (left.offset.bytes() as u128 & right) as u64;
234-
(
235-
Scalar::Ptr(Pointer::new_with_tag(
236-
left.alloc_id,
237-
Size::from_bytes(offset),
238-
left.tag,
239-
)),
240-
false,
241-
)
242-
} else if right & base_mask == 0 {
243-
// Case 2: the base address bits are all taken away, i.e., right is all-0 there.
244-
let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
245-
(v, false)
246-
} else {
247-
throw_unsup!(ReadPointerAsBytes);
248-
}
249-
}
250-
251-
Rem if !signed => {
252-
// Doing modulo a divisor of the alignment is allowed.
253-
// (Intuition: modulo a divisor leaks less information.)
254-
let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
255-
.expect("alloc info with MaybeDead cannot fail")
256-
.1.bytes();
257-
let right = right as u64;
258-
let ptr_size = self.memory().pointer_size();
259-
if right == 1 {
260-
// Modulo 1 is always 0.
261-
(Scalar::from_uint(0u32, ptr_size), false)
262-
} else if ptr_base_align % right == 0 {
263-
// The base address would be cancelled out by the modulo operation, so we can
264-
// just take the modulo of the offset.
265-
(
266-
Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
267-
false,
268-
)
269-
} else {
270-
throw_unsup!(ReadPointerAsBytes);
271-
}
272-
}
273-
274-
_ => {
275-
let msg = format!(
276-
"unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
277-
bin_op,
278-
left,
279-
right,
280-
if signed { "signed" } else { "unsigned" }
281-
);
282-
throw_unsup!(Unimplemented(msg));
283-
}
284-
})
285-
}
286-
287110
/// Raises an error if the offset moves the pointer outside of its allocation.
288111
/// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
289112
/// moves in there because the size is 0). We also consider the NULL pointer its own separate

0 commit comments

Comments
 (0)