Commit badfc70
[kani] Prove some PtrInner methods
Also add Kani `requires` and `ensures` annotations to some methods and to some functions in `util`. While we're here, roll Kani to 0.60.0 and fix the Kani roller.

gherrit-pr-id: Ic1e352d3d92a45ff86d8ca85b8221bccfb07033b
1 parent d769fb9 commit badfc70

5 files changed: +318 -9 lines changed
.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion

@@ -595,7 +595,7 @@ jobs:
 args: "--package zerocopy --features __internal_use_only_features_that_work_on_stable --output-format=terse --randomize-layout --memory-safety-checks --overflow-checks --undefined-function-checks --unwinding-checks"
 # This version is automatically rolled by
 # `roll-pinned-toolchain-versions.yml`.
-kani-version: 0.55.0
+kani-version: 0.60.0

 # NEON intrinsics are currently broken on big-endian platforms. [1] This test ensures
 # that we don't accidentally attempt to compile these intrinsics on such platforms. We

.github/workflows/roll-pinned-toolchain-versions.yml

Lines changed: 1 addition & 1 deletion

@@ -144,7 +144,7 @@ jobs:
 runs-on: ubuntu-latest
 strategy:
 matrix:
-branch: ["main", "v0.7.x"]
+branch: ["main"]
 name: Roll pinned Kani version
 steps:
 - name: Checkout code

src/lib.rs

Lines changed: 7 additions & 2 deletions

@@ -377,12 +377,12 @@ use std::io;
 
 use crate::pointer::invariant::{self, BecauseExclusive};
 
-#[cfg(any(feature = "alloc", test))]
+#[cfg(any(feature = "alloc", test, kani))]
 extern crate alloc;
 #[cfg(any(feature = "alloc", test))]
 use alloc::{boxed::Box, vec::Vec};
 
-#[cfg(any(feature = "alloc", test, kani))]
+#[cfg(any(feature = "alloc", test))]
 use core::alloc::Layout;
 
 // Used by `TryFromBytes::is_bit_valid`.

@@ -738,8 +738,13 @@ pub unsafe trait KnownLayout {
     /// The type of metadata stored in a pointer to `Self`.
     ///
     /// This is `()` for sized types and `usize` for slice DSTs.
+    #[cfg(not(kani))]
     type PointerMetadata: PointerMetadata;
 
+    #[cfg(kani)]
+    #[allow(missing_docs)]
+    type PointerMetadata: PointerMetadata + kani::Arbitrary;
+
     /// A maybe-uninitialized analog of `Self`
     ///
     /// # Safety
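The second hunk duplicates the `PointerMetadata` declaration so that, under `cfg(kani)`, it additionally requires `kani::Arbitrary`; this is what lets the new harnesses in `src/pointer/inner.rs` draw metadata values with `kani::any()` (as `prove_try_cast_into` does below). A condensed sketch of the pattern, using a hypothetical trait rather than the real `KnownLayout`:

// Hypothetical trait illustrating the cfg-duplicated associated type.
trait HasMetadata {
    // Normal builds: no extra bound, so non-Kani users are unaffected.
    #[cfg(not(kani))]
    type Metadata;

    // Verification builds: the bound allows `let m: T::Metadata = kani::any();`.
    #[cfg(kani)]
    type Metadata: kani::Arbitrary;
}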

src/pointer/inner.rs

Lines changed: 293 additions & 2 deletions

@@ -146,6 +146,16 @@ impl<'a, T> PtrInner<'a, [T]> {
     /// # Safety
     ///
     /// `range` is a valid range (`start <= end`) and `end <= self.len()`.
+    #[cfg_attr(
+        kani,
+        kani::requires(range.start <= range.end && range.end <= self.len()),
+        kani::ensures(|r| r.len() == range.end - range.start),
+        kani::ensures(|r| {
+            let begin = unsafe { r.as_non_null().cast::<T>().sub(range.start) };
+            let ptr = self.as_non_null().cast::<T>();
+            begin == ptr
+        })
+    )]
     pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self {
         let base = self.as_non_null().cast::<T>().as_ptr();
 
@@ -206,8 +216,23 @@
     ///
     /// The caller promises that `l_len <= self.len()`.
     ///
-    /// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed
-    /// that `left` and `right` are contiguous and non-overlapping.
+    /// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed that:
+    /// - `left.len() == l_len`
+    /// - `left` and `right` are contiguous and non-overlapping
+    /// - Together, `left` and `right` cover the exact same byte range as `self`
+    #[cfg_attr(
+        kani,
+        kani::requires(l_len <= self.len()),
+        kani::ensures(|(l, r)| l.len() == l_len && r.len() == self.len() - l_len),
+        kani::ensures(|(l, _)| {
+            l.as_non_null().cast::<T>() == self.as_non_null().cast::<T>()
+        }),
+        kani::ensures(|(l, r)| {
+            let l = l.as_non_null().cast::<T>();
+            let r = r.as_non_null().cast::<T>();
+            unsafe { l.add(l_len) == r }
+        })
+    )]
     pub(crate) unsafe fn split_at(self, l_len: usize) -> (Self, Self) {
         // SAFETY: The caller promises that `l_len <= self.len()`.
         // Trivially, `0 <= l_len`.
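The new `ensures` clauses on `split_at` encode "contiguous and non-overlapping" as a pointer identity: the left half starts at `self`, and advancing its pointer by `l_len` elements lands exactly on the start of the right half. The same property can be observed with the safe `slice::split_at`; the following standalone snippet (not part of the commit) checks the analogous assertions.

// Safe-Rust analog of the `split_at` postconditions proved above.
fn main() {
    let xs = [1u8, 2, 3, 4, 5];
    let l_len = 2;
    let (left, right) = xs.split_at(l_len);
    assert_eq!(left.len(), l_len);
    assert_eq!(left.len() + right.len(), xs.len());
    // Contiguity/non-overlap: `right` begins exactly `l_len` elements
    // after the start of `left`.
    assert_eq!(unsafe { left.as_ptr().add(l_len) }, right.as_ptr());
}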
@@ -333,6 +358,11 @@ impl<'a, T, const N: usize> PtrInner<'a, [T; N]> {
     /// Callers may assume that the returned `PtrInner` references the same
     /// address and length as `self`.
     #[allow(clippy::wrong_self_convention)]
+    #[cfg_attr(
+        kani,
+        kani::ensures(|slc| slc.len() == N),
+        kani::ensures(|slc| self.as_non_null().cast::<T>() == slc.as_non_null().cast::<T>())
+    )]
     pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> {
         let start = self.as_non_null().cast::<T>().as_ptr();
         let slice = core::ptr::slice_from_raw_parts_mut(start, N);
@@ -395,6 +425,38 @@ impl<'a> PtrInner<'a, [u8]> {
     /// - If this is a prefix cast, `ptr` has the same address as `self`.
     /// - If this is a suffix cast, `remainder` has the same address as `self`.
     #[inline]
+    #[cfg_attr(
+        kani,
+        kani::ensures(|r| match r {
+            Ok((t, rest)) => {
+                let ptr_u8 = self.as_non_null().cast::<u8>();
+                let t_u8 = t.as_non_null().cast::<u8>();
+                let rest_u8 = rest.as_non_null().cast::<u8>();
+                match cast_type {
+                    CastType::Prefix => {
+                        t_u8 == ptr_u8
+                        // TODO: Assert that `t` and `rest` are contiguous and
+                        // non-overlapping.
+                    }
+                    CastType::Suffix => {
+                        // The second condition asserts that `rest` and `t` are
+                        // contiguous and non-overlapping.
+                        rest_u8 == ptr_u8 && unsafe { rest_u8.add(rest.len()) == t_u8 }
+                    }
+                }
+            }
+            Err(CastError::Alignment(_)) => {
+                true
+                // TODO: Prove that there would have been an alignment problem.
+            }
+            Err(CastError::Size(_)) => {
+                true
+                // TODO: Prove that there would have been a size problem.
+            }
+            // Unreachable because this error variant is uninhabited.
+            Err(CastError::Validity(_)) => false,
+        })
+    )]
     pub(crate) fn try_cast_into<U>(
         self,
         cast_type: CastType,
@@ -519,3 +581,232 @@ mod tests {
         }
     }
 }
+
+#[cfg(kani)]
+mod proofs {
+    use crate::{SizeInfo, TrailingSliceLayout};
+
+    use super::*;
+
+    impl<T> kani::Arbitrary for PtrInner<'static, T>
+    where
+        T: ?Sized + KnownLayout,
+    {
+        fn any() -> Self {
+            let (size, meta) = match T::LAYOUT.size_info {
+                SizeInfo::Sized { size } => (size, T::PointerMetadata::from_elem_count(0)),
+                SizeInfo::SliceDst(TrailingSliceLayout { .. }) => {
+                    let meta = T::PointerMetadata::from_elem_count(kani::any());
+                    let size = meta.size_for_metadata(T::LAYOUT);
+                    kani::assume(size.is_some());
+                    let size = size.unwrap();
+                    kani::assume(size <= isize::MAX as usize);
+
+                    (size, meta)
+                }
+            };
+
+            let layout =
+                alloc::alloc::Layout::from_size_align(size, T::LAYOUT.align.get()).unwrap();
+
+            let ptr = if layout.size() == 0 {
+                NonNull::dangling()
+            } else {
+                let ptr = unsafe { alloc::alloc::alloc(layout) };
+                NonNull::new(ptr).unwrap_or_else(|| alloc::alloc::handle_alloc_error(layout))
+            };
+
+            let ptr = T::raw_from_ptr_len(ptr, meta);
+            unsafe { PtrInner::new(ptr) }
+        }
+    }
+
+    impl<'a, T> PtrInner<'a, T>
+    where
+        T: ?Sized + KnownLayout,
+    {
+        fn assert_invariants(&self) {
+            let ptr = self.as_non_null();
+            let size = T::size_of_val_raw(ptr).unwrap();
+            assert!(size <= isize::MAX as usize);
+
+            let addr = ptr.addr().get();
+            // Assert that the referent doesn't wrap around the address space.
+            assert!(addr.checked_add(size).is_some());
+        }
+    }
+
+    macro_rules! prove_foreach {
+        ($generic:ident { $($concrete:ident = $ty:ty,)* }) => {
+            $(
+                #[kani::proof]
+                fn $concrete() {
+                    $generic::<$ty>();
+                }
+            )*
+        };
+    }
+
+    fn prove_arbitrary<T: 'static + ?Sized + KnownLayout>() {
+        let ptr: PtrInner<'static, T> = kani::any();
+        ptr.assert_invariants();
+    }
+
+    // On 64-bit targets, Rust uses 2^61 - 1 rather than 2^63 - 1 as the maximum
+    // object size [1].
+    //
+    // [1] https://github.com/rust-lang/rust/blob/493c38ba371929579fe136df26eccd9516347c7a/compiler/rustc_abi/src/lib.rs#L410
+    const MAX_SIZE: usize = 1 << 61 - 1;
+
+    prove_foreach! {
+        prove_arbitrary {
+            prove_arbitrary_empty_tuple = (),
+            prove_arbitrary_u8 = u8,
+            prove_arbitrary_u16 = u16,
+            prove_arbitrary_slice_u8 = [u8],
+            prove_arbitrary_slice_u16 = [u16],
+            prove_arbitrary_large_u8 = [u8; MAX_SIZE],
+            prove_arbitrary_large_slice_u8 = [[u8; MAX_SIZE]],
+            prove_arbitrary_large_u16 = [u16; (MAX_SIZE)/2],
+            prove_arbitrary_large_slice_u16 = [[u16; (MAX_SIZE)/2]],
+        }
+    }
+
+    fn prove_slice_unchecked<T: 'static>() {
+        let ptr: PtrInner<'static, [T]> = kani::any();
+        let start = kani::any();
+        let end = kani::any();
+
+        kani::assume(start <= end);
+        kani::assume(end <= ptr.len());
+
+        let slc = unsafe { ptr.slice_unchecked(Range { start, end }) };
+        slc.assert_invariants();
+
+        assert_eq!(slc.len(), end - start);
+
+        let begin = unsafe { slc.as_non_null().cast::<T>().sub(start) };
+        assert_eq!(begin, ptr.as_non_null().cast::<T>());
+    }
+
+    prove_foreach! {
+        prove_slice_unchecked {
+            prove_slice_unchecked_empty_tuple = (),
+            prove_slice_unchecked_u8 = u8,
+            prove_slice_unchecked_u16 = u16,
+            prove_slice_unchecked_large_u8 = [u8; MAX_SIZE],
+            prove_slice_unchecked_large_u16 = [u16; (MAX_SIZE)/2],
+        }
+    }
+
+    fn prove_split_at<T: 'static>() {
+        let ptr: PtrInner<'static, [T]> = kani::any();
+        let l_len = kani::any();
+
+        kani::assume(l_len <= ptr.len());
+
+        let (left, right) = unsafe { ptr.split_at(l_len) };
+        left.assert_invariants();
+        right.assert_invariants();
+
+        assert_eq!(left.len(), l_len);
+        assert_eq!(left.len() + right.len(), ptr.len());
+        assert_eq!(left.as_non_null().cast::<T>(), ptr.as_non_null().cast::<T>());
+    }
+
+    prove_foreach! {
+        prove_split_at {
+            prove_split_at_empty_tuple = (),
+            prove_split_at_u8 = u8,
+            prove_split_at_u16 = u16,
+            prove_split_at_large_u8 = [u8; MAX_SIZE],
+            prove_split_at_large_u16 = [u16; (MAX_SIZE)/2],
+        }
+    }
+
+    #[kani::proof]
+    fn prove_as_slice() {
+        fn inner<T: 'static, const N: usize>() {
+            let ptr: PtrInner<'static, [T; N]> = kani::any();
+            let slc = ptr.as_slice();
+            slc.assert_invariants();
+
+            assert_eq!(ptr.as_non_null().cast::<T>(), slc.as_non_null().cast::<T>());
+            assert_eq!(slc.len(), N);
+        }
+
+        inner::<(), 0>();
+        inner::<(), 1>();
+        inner::<(), MAX_SIZE>();
+
+        inner::<u8, 0>();
+        inner::<u8, 1>();
+        inner::<u8, MAX_SIZE>();
+
+        inner::<[u8; MAX_SIZE / 2], 0>();
+        inner::<[u8; MAX_SIZE / 2], 1>();
+        inner::<[u8; MAX_SIZE / 2], 2>();
+
+        inner::<[u8; MAX_SIZE], 0>();
+        inner::<[u8; MAX_SIZE], 1>();
+    }
+
+    impl kani::Arbitrary for CastType {
+        fn any() -> CastType {
+            if kani::any() {
+                CastType::Prefix
+            } else {
+                CastType::Suffix
+            }
+        }
+    }
+
+    fn prove_try_cast_into<T: 'static + ?Sized + KnownLayout>() {
+        let ptr: PtrInner<'static, [u8]> = kani::any();
+        let cast_type = kani::any();
+        let meta = kani::any();
+
+        match ptr.try_cast_into::<T>(cast_type, meta) {
+            Ok((t, rest)) => {
+                t.assert_invariants();
+
+                let ptr_u8 = ptr.as_non_null().cast::<u8>();
+                let t_u8 = t.as_non_null().cast::<u8>();
+                let rest_u8 = rest.as_non_null().cast::<u8>();
+                match cast_type {
+                    CastType::Prefix => {
+                        assert_eq!(t_u8, ptr_u8);
+                        // TODO: Assert that `t` and `rest` are contiguous and
+                        // non-overlapping.
+                    }
+                    CastType::Suffix => {
+                        assert_eq!(rest_u8, ptr_u8);
+                        // Asserts that `rest` and `t` are contiguous and
+                        // non-overlapping.
+                        assert_eq!(unsafe { rest_u8.add(rest.len()) }, t_u8);
+                    }
+                }
+            }
+            Err(CastError::Alignment(_)) => {
+                // TODO: Prove that there would have been an alignment problem.
+            }
+            Err(CastError::Size(_)) => {
+                // TODO: Prove that there would have been a size problem.
+            }
+        }
+    }
+
+    prove_foreach! {
+        prove_try_cast_into {
+            prove_try_cast_into_empty_tuple = (),
+            prove_try_cast_into_u8 = u8,
+            prove_try_cast_into_u16 = u16,
+            prove_try_cast_into_slice_u8 = [u8],
+            prove_try_cast_into_slice_u16 = [u16],
+            prove_try_cast_into_large_u8 = [u8; MAX_SIZE],
+            prove_try_cast_into_large_slice_u8 = [[u8; MAX_SIZE]],
+            prove_try_cast_into_large_u16 = [u16; (MAX_SIZE)/2],
+            prove_try_cast_into_large_slice_u16 = [[u16; (MAX_SIZE)/2]],
+        }
+    }
+}
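For reference, `prove_foreach!` is just a harness generator: each `name = type` pair expands to a standalone `#[kani::proof]` harness that instantiates the generic prover at that type. A sketch of the expansion for one entry (`prove_split_at_u8 = u8`):

// Expansion sketch; the real code is produced by the macro above.
#[kani::proof]
fn prove_split_at_u8() {
    prove_split_at::<u8>();
}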
