Skip to content

Commit 94d93fd

Browse files
committed
[kani] Prove some PtrInner methods
Also add Kani `requires` and `ensures` annotations to some methods and to some functions in `util`. While we're here, roll Kani to 0.60.0 and fix the Kani roller. gherrit-pr-id: Ic1e352d3d92a45ff86d8ca85b8221bccfb07033b
1 parent 40f3f1e commit 94d93fd

File tree

6 files changed

+387
-6
lines changed

6 files changed

+387
-6
lines changed

.github/workflows/ci.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -592,10 +592,10 @@ jobs:
592592
# TODO(https://github.com/model-checking/kani-github-action/issues/56):
593593
# Go back to testing all features once the Kani GitHub Action supports
594594
# specifying a particular toolchain.
595-
args: "--package zerocopy --features __internal_use_only_features_that_work_on_stable --output-format=terse -Zfunction-contracts --randomize-layout --memory-safety-checks --overflow-checks --undefined-function-checks --unwinding-checks"
595+
args: "--package zerocopy --features __internal_use_only_features_that_work_on_stable --output-format=terse -Zfunction-contracts -Zmem-predicates --randomize-layout --memory-safety-checks --overflow-checks --undefined-function-checks --unwinding-checks"
596596
# This version is automatically rolled by
597597
# `roll-pinned-toolchain-versions.yml`.
598-
kani-version: 0.55.0
598+
kani-version: 0.60.0
599599

600600
# NEON intrinsics are currently broken on big-endian platforms. [1] This test ensures
601601
# that we don't accidentally attempt to compile these intrinsics on such platforms. We

.github/workflows/roll-pinned-toolchain-versions.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ jobs:
144144
runs-on: ubuntu-latest
145145
strategy:
146146
matrix:
147-
branch: ["main", "v0.7.x"]
147+
branch: ["main"]
148148
name: Roll pinned Kani version
149149
steps:
150150
- name: Checkout code

src/lib.rs

+5
Original file line numberDiff line numberDiff line change
@@ -738,8 +738,13 @@ pub unsafe trait KnownLayout {
738738
/// The type of metadata stored in a pointer to `Self`.
739739
///
740740
/// This is `()` for sized types and `usize` for slice DSTs.
741+
#[cfg(not(kani))]
741742
type PointerMetadata: PointerMetadata;
742743

744+
#[cfg(kani)]
745+
#[allow(missing_docs)]
746+
type PointerMetadata: PointerMetadata + kani::Arbitrary;
747+
743748
/// A maybe-uninitialized analog of `Self`
744749
///
745750
/// # Safety

src/pointer/inner.rs

+320-2
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,32 @@ mod _def {
7878
/// address space.
7979
/// 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to
8080
/// live for at least `'a`.
81+
#[cfg_attr(
82+
kani,
83+
// TODO: Use `is_inbounds` once it's public. `can_write_unaligned`
84+
// is technically stronger than we need.
85+
kani::requires(kani::mem::can_write_unaligned(ptr.as_ptr())),
86+
kani::ensures(|p| kani::mem::can_write_unaligned(p.as_non_null().as_ptr())),
87+
kani::ensures(|p| {
88+
#[allow(ambiguous_wide_pointer_comparisons)]
89+
return p.as_non_null() == ptr;
90+
}),
91+
kani::ensures(|p| {
92+
kani::mem::checked_size_of_raw(ptr.as_ptr()).unwrap() == 0 ||
93+
kani::mem::same_allocation(p.as_non_null().as_ptr(), ptr.as_ptr())
94+
}),
95+
)]
8196
pub(crate) const unsafe fn new(ptr: NonNull<T>) -> PtrInner<'a, T> {
97+
#[cfg(kani)]
98+
#[kani::proof_for_contract(PtrInner::new)]
99+
fn proof() {
100+
fn new<T: ?Sized + KnownLayout>() {
101+
unsafe { PtrInner::<T>::new(proofs::any_nonnull()) };
102+
}
103+
104+
for_many_types!(new);
105+
}
106+
82107
// SAFETY: The caller has promised to satisfy all safety invariants
83108
// of `PtrInner`.
84109
Self { ptr, _marker: PhantomData }
@@ -146,6 +171,16 @@ impl<'a, T> PtrInner<'a, [T]> {
146171
/// # Safety
147172
///
148173
/// `range` is a valid range (`start <= end`) and `end <= self.len()`.
174+
#[cfg_attr(
175+
kani,
176+
kani::requires(range.start <= range.end && range.end <= self.len()),
177+
kani::ensures(|r| r.len() == range.end - range.start),
178+
kani::ensures(|r| {
179+
let begin = unsafe { r.as_non_null().cast::<T>().sub(range.start) };
180+
let ptr = self.as_non_null().cast::<T>();
181+
begin == ptr
182+
})
183+
)]
149184
pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self {
150185
let base = self.as_non_null().cast::<T>().as_ptr();
151186

@@ -206,8 +241,23 @@ impl<'a, T> PtrInner<'a, [T]> {
206241
///
207242
/// The caller promises that `l_len <= self.len()`.
208243
///
209-
/// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed
210-
/// that `left` and `right` are contiguous and non-overlapping.
244+
/// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed that:
245+
/// - `left.len() == l_len`
246+
/// - `left` and `right` are contiguous and non-overlapping
247+
/// - Together, `left` and `right` cover the exact same byte range as `self`
248+
#[cfg_attr(
249+
kani,
250+
kani::requires(l_len <= self.len()),
251+
kani::ensures(|(l, r)| l.len() == l_len && r.len() == self.len() - l_len),
252+
kani::ensures(|(l, _)| {
253+
l.as_non_null().cast::<T>() == self.as_non_null().cast::<T>()
254+
}),
255+
kani::ensures(|(l, r)| {
256+
let l = l.as_non_null().cast::<T>();
257+
let r = r.as_non_null().cast::<T>();
258+
unsafe { l.add(l_len) == r }
259+
})
260+
)]
211261
pub(crate) unsafe fn split_at(self, l_len: usize) -> (Self, Self) {
212262
// SAFETY: The caller promises that `l_len <= self.len()`.
213263
// Trivially, `0 <= l_len`.
@@ -333,6 +383,11 @@ impl<'a, T, const N: usize> PtrInner<'a, [T; N]> {
333383
/// Callers may assume that the returned `PtrInner` references the same
334384
/// address and length as `self`.
335385
#[allow(clippy::wrong_self_convention)]
386+
#[cfg_attr(
387+
kani,
388+
kani::ensures(|slc| slc.len() == N),
389+
kani::ensures(|slc| self.as_non_null().cast::<T>() == slc.as_non_null().cast::<T>())
390+
)]
336391
pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> {
337392
let start = self.as_non_null().cast::<T>().as_ptr();
338393
let slice = core::ptr::slice_from_raw_parts_mut(start, N);
@@ -395,6 +450,38 @@ impl<'a> PtrInner<'a, [u8]> {
395450
/// - If this is a prefix cast, `ptr` has the same address as `self`.
396451
/// - If this is a suffix cast, `remainder` has the same address as `self`.
397452
#[inline]
453+
#[cfg_attr(
454+
kani,
455+
kani::ensures(|r| match r {
456+
Ok((t, rest)) => {
457+
let ptr_u8 = self.as_non_null().cast::<u8>();
458+
let t_u8 = t.as_non_null().cast::<u8>();
459+
let rest_u8 = rest.as_non_null().cast::<u8>();
460+
match cast_type {
461+
CastType::Prefix => {
462+
t_u8 == ptr_u8
463+
// TODO: Assert that `t` and `rest` are contiguous and
464+
// non-overlapping.
465+
}
466+
CastType::Suffix => {
467+
// The second condition asserts that `rest` and `t` are
468+
// contiguous and non-overlapping.
469+
rest_u8 == ptr_u8 && unsafe { rest_u8.add(rest.len()) == t_u8 }
470+
}
471+
}
472+
}
473+
Err(CastError::Alignment(_)) => {
474+
true
475+
// TODO: Prove that there would have been an alignment problem.
476+
}
477+
Err(CastError::Size(_)) => {
478+
true
479+
// TODO: Prove that there would have been a size problem.
480+
}
481+
// Unreachable because this error variant is uninhabited.
482+
Err(CastError::Validity(_)) => false,
483+
})
484+
)]
398485
pub(crate) fn try_cast_into<U>(
399486
self,
400487
cast_type: CastType,
@@ -519,3 +606,234 @@ mod tests {
519606
}
520607
}
521608
}
609+
610+
#[cfg(kani)]
mod proofs {
    use crate::{SizeInfo, TrailingSliceLayout};

    use super::*;

    /// Produces an arbitrary `NonNull<T>` suitable for constructing a
    /// `PtrInner`.
    ///
    /// For zero-sized referents, this is a dangling (but aligned) pointer; for
    /// non-zero-sized referents, it points to a live heap allocation of the
    /// referent's exact size and alignment. For slice DSTs, the element count
    /// is chosen nondeterministically, constrained so that the total size is
    /// computable and no larger than `isize::MAX`.
    pub(super) fn any_nonnull<T: ?Sized + KnownLayout>() -> NonNull<T> {
        let (size, meta) = match T::LAYOUT.size_info {
            SizeInfo::Sized { size } => (size, T::PointerMetadata::from_elem_count(0)),
            SizeInfo::SliceDst(TrailingSliceLayout { .. }) => {
                let meta = T::PointerMetadata::from_elem_count(kani::any());
                let size = meta.size_for_metadata(T::LAYOUT);
                // Discard element counts whose total size overflows or exceeds
                // the language-level maximum object size.
                kani::assume(size.is_some());
                let size = size.unwrap();
                kani::assume(size <= isize::MAX as usize);

                (size, meta)
            }
        };

        let layout = alloc::alloc::Layout::from_size_align(size, T::LAYOUT.align.get()).unwrap();

        let ptr = if layout.size() == 0 {
            // `alloc` does not support zero-size allocations; use a dangling,
            // well-aligned pointer instead, as `PtrInner` permits for
            // zero-sized referents.
            NonNull::dangling()
        } else {
            let ptr = unsafe { alloc::alloc::alloc(layout) };
            NonNull::new(ptr).unwrap_or_else(|| alloc::alloc::handle_alloc_error(layout))
        };

        T::raw_from_ptr_len(ptr, meta)
    }

    impl<T> kani::Arbitrary for PtrInner<'static, T>
    where
        T: ?Sized + KnownLayout,
    {
        fn any() -> Self {
            // SAFETY: `any_nonnull` only produces pointers which satisfy
            // `PtrInner::new`'s safety preconditions (valid allocation or
            // dangling-for-ZST, in-bounds size, no address-space wraparound).
            unsafe { PtrInner::new(any_nonnull()) }
        }
    }

    impl<'a, T> PtrInner<'a, T>
    where
        T: ?Sized + KnownLayout,
    {
        /// Asserts the size-related invariants that every `PtrInner` must
        /// uphold: the referent's size is computable, is at most
        /// `isize::MAX`, and the referent does not wrap around the address
        /// space.
        fn assert_invariants(&self) {
            let ptr = self.as_non_null();
            let size = T::size_of_val_raw(ptr).unwrap();
            assert!(size <= isize::MAX as usize);

            let addr = ptr.addr().get();
            // Assert that the referent doesn't wrap around the address space.
            assert!(addr.checked_add(size).is_some());
        }
    }

    /// Expands to one `#[kani::proof]` harness per listed concrete type, each
    /// of which invokes `$generic::<$ty>()`.
    macro_rules! prove_foreach {
        ($generic:ident { $($concrete:ident = $ty:ty,)* }) => {
            $(
                #[kani::proof]
                fn $concrete() {
                    $generic::<$ty>();
                }
            )*
        };
    }

    /// Proves that every `PtrInner` produced by `kani::Arbitrary` satisfies
    /// `PtrInner`'s invariants.
    fn prove_arbitrary<T: 'static + ?Sized + KnownLayout>() {
        let ptr: PtrInner<'static, T> = kani::any();
        ptr.assert_invariants();
    }

    // On 64-bit targets, Rust uses 2^61 - 1 rather than 2^63 - 1 as the maximum
    // object size [1].
    //
    // Note the parentheses: `1 << 61 - 1` would parse as `1 << 60` because `-`
    // binds more tightly than `<<`, which is not the intended 2^61 - 1.
    //
    // [1] https://github.com/rust-lang/rust/blob/493c38ba371929579fe136df26eccd9516347c7a/compiler/rustc_abi/src/lib.rs#L410
    pub(super) const MAX_SIZE: usize = (1 << 61) - 1;

    prove_foreach! {
        prove_arbitrary {
            prove_arbitrary_empty_tuple = (),
            prove_arbitrary_u8 = u8,
            prove_arbitrary_u16 = u16,
            prove_arbitrary_slice_u8 = [u8],
            prove_arbitrary_slice_u16 = [u16],
            prove_arbitrary_large_u8 = [u8; MAX_SIZE],
            prove_arbitrary_large_slice_u8 = [[u8; MAX_SIZE]],
            prove_arbitrary_large_u16 = [u16; (MAX_SIZE)/2],
            prove_arbitrary_large_slice_u16 = [[u16; (MAX_SIZE)/2]],
        }
    }

    /// Proves that `slice_unchecked`, given an in-bounds range, preserves
    /// `PtrInner`'s invariants, returns a slice of the requested length, and
    /// returns a pointer at the expected offset from `self`.
    fn prove_slice_unchecked<T: 'static>() {
        let ptr: PtrInner<'static, [T]> = kani::any();
        let start = kani::any();
        let end = kani::any();

        // Restrict to inputs satisfying `slice_unchecked`'s safety
        // precondition: `start <= end <= self.len()`.
        kani::assume(start <= end);
        kani::assume(end <= ptr.len());

        // SAFETY: `start <= end` and `end <= ptr.len()` by the assumptions
        // above.
        let slc = unsafe { ptr.slice_unchecked(Range { start, end }) };
        slc.assert_invariants();

        assert_eq!(slc.len(), end - start);

        // SAFETY: `slc` begins `start` elements past `ptr`'s referent, which
        // remains in bounds of the same allocation.
        let begin = unsafe { slc.as_non_null().cast::<T>().sub(start) };
        assert_eq!(begin, ptr.as_non_null().cast::<T>());
    }

    prove_foreach! {
        prove_slice_unchecked {
            prove_slice_unchecked_empty_tuple = (),
            prove_slice_unchecked_u8 = u8,
            prove_slice_unchecked_u16 = u16,
            prove_slice_unchecked_large_u8 = [u8; MAX_SIZE],
            prove_slice_unchecked_large_u16 = [u16; (MAX_SIZE)/2],
        }
    }

    /// Proves that `split_at`, given `l_len <= self.len()`, yields two halves
    /// which satisfy `PtrInner`'s invariants, have the expected lengths, and
    /// together cover exactly `self`'s referent.
    fn prove_split_at<T: 'static>() {
        let ptr: PtrInner<'static, [T]> = kani::any();
        let l_len = kani::any();

        // Restrict to inputs satisfying `split_at`'s safety precondition.
        kani::assume(l_len <= ptr.len());

        // SAFETY: `l_len <= ptr.len()` by the assumption above.
        let (left, right) = unsafe { ptr.split_at(l_len) };
        left.assert_invariants();
        right.assert_invariants();

        assert_eq!(left.len(), l_len);
        assert_eq!(left.len() + right.len(), ptr.len());
        assert_eq!(left.as_non_null().cast::<T>(), ptr.as_non_null().cast::<T>());
    }

    prove_foreach! {
        prove_split_at {
            prove_split_at_empty_tuple = (),
            prove_split_at_u8 = u8,
            prove_split_at_u16 = u16,
            prove_split_at_large_u8 = [u8; MAX_SIZE],
            prove_split_at_large_u16 = [u16; (MAX_SIZE)/2],
        }
    }

    /// Proves that `as_slice` preserves `PtrInner`'s invariants and returns a
    /// slice with the same address as `self` and length `N`.
    #[kani::proof]
    fn prove_as_slice() {
        fn inner<T: 'static, const N: usize>() {
            let ptr: PtrInner<'static, [T; N]> = kani::any();
            let slc = ptr.as_slice();
            slc.assert_invariants();

            assert_eq!(ptr.as_non_null().cast::<T>(), slc.as_non_null().cast::<T>());
            assert_eq!(slc.len(), N);
        }

        inner::<(), 0>();
        inner::<(), 1>();
        inner::<(), MAX_SIZE>();

        inner::<u8, 0>();
        inner::<u8, 1>();
        inner::<u8, MAX_SIZE>();

        inner::<[u8; MAX_SIZE / 2], 0>();
        inner::<[u8; MAX_SIZE / 2], 1>();
        inner::<[u8; MAX_SIZE / 2], 2>();

        inner::<[u8; MAX_SIZE], 0>();
        inner::<[u8; MAX_SIZE], 1>();
    }

    impl kani::Arbitrary for CastType {
        fn any() -> CastType {
            if kani::any() {
                CastType::Prefix
            } else {
                CastType::Suffix
            }
        }
    }

    /// Proves that a successful `try_cast_into` yields a `PtrInner` which
    /// satisfies its invariants and whose address relates to `self` as
    /// documented: the target aliases `self`'s start for prefix casts, and the
    /// remainder aliases `self`'s start (with the target contiguous after it)
    /// for suffix casts.
    fn prove_try_cast_into<T: 'static + ?Sized + KnownLayout>() {
        let ptr: PtrInner<'static, [u8]> = kani::any();
        let cast_type = kani::any();
        let meta = kani::any();

        match ptr.try_cast_into::<T>(cast_type, meta) {
            Ok((t, rest)) => {
                t.assert_invariants();

                let ptr_u8 = ptr.as_non_null().cast::<u8>();
                let t_u8 = t.as_non_null().cast::<u8>();
                let rest_u8 = rest.as_non_null().cast::<u8>();
                match cast_type {
                    CastType::Prefix => {
                        assert_eq!(t_u8, ptr_u8);
                        // TODO: Assert that `t` and `rest` are contiguous and
                        // non-overlapping.
                    }
                    CastType::Suffix => {
                        assert_eq!(rest_u8, ptr_u8);
                        // Asserts that `rest` and `t` are contiguous and
                        // non-overlapping.
                        //
                        // SAFETY: `rest` is a prefix of `ptr`'s referent, so
                        // its end is in bounds of the same allocation.
                        assert_eq!(unsafe { rest_u8.add(rest.len()) }, t_u8);
                    }
                }
            }
            Err(CastError::Alignment(_)) => {
                // TODO: Prove that there would have been an alignment problem.
            }
            Err(CastError::Size(_)) => {
                // TODO: Prove that there would have been a size problem.
            }
        }
    }

    prove_foreach! {
        prove_try_cast_into {
            prove_try_cast_into_empty_tuple = (),
            prove_try_cast_into_u8 = u8,
            prove_try_cast_into_u16 = u16,
            prove_try_cast_into_slice_u8 = [u8],
            prove_try_cast_into_slice_u16 = [u16],
            prove_try_cast_into_large_u8 = [u8; MAX_SIZE],
            prove_try_cast_into_large_slice_u8 = [[u8; MAX_SIZE]],
            prove_try_cast_into_large_u16 = [u16; (MAX_SIZE)/2],
            prove_try_cast_into_large_slice_u16 = [[u16; (MAX_SIZE)/2]],
        }
    }
}

0 commit comments

Comments
 (0)