From 59288b3396ceae13e1e248610253e7d725a82a44 Mon Sep 17 00:00:00 2001
From: Shoyu Vanilla
Date: Sat, 28 Dec 2024 02:05:08 +0900
Subject: [PATCH] WIP(will be squashed)

---
 .../core/src/slice/sort/shared/smallsort.rs | 182 +++++++++++++++++-
 1 file changed, 181 insertions(+), 1 deletion(-)

diff --git a/library/core/src/slice/sort/shared/smallsort.rs b/library/core/src/slice/sort/shared/smallsort.rs
index 09f898309bd65..7a361419c27b0 100644
--- a/library/core/src/slice/sort/shared/smallsort.rs
+++ b/library/core/src/slice/sort/shared/smallsort.rs
@@ -3,6 +3,10 @@
 use crate::mem::{self, ManuallyDrop, MaybeUninit};
 use crate::slice::sort::shared::FreezeMarker;
 use crate::{intrinsics, ptr, slice};
+use safety::{ensures, requires};
+
+#[cfg(kani)]
+use crate::{kani, ub_checks};
 
 // It's important to differentiate between SMALL_SORT_THRESHOLD performance for
 // small slices and small-sort performance sorting small sub-slices as part of
@@ -539,6 +543,21 @@ where
 ///
 /// # Safety
 /// begin < tail and p must be valid and initialized for all begin <= p <= tail.
+#[requires(begin.addr() < tail.addr() && {
+    let len = tail.addr() - begin.addr();
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    (0..=len).into_iter().all(|i| {
+        let p = begin.add(i);
+        ub_checks::can_dereference(p as *const _) &&
+        ub_checks::can_write(p) &&
+        ub_checks::same_allocation(begin as *const _, p as *const _)
+    }) && (0..(len - 1)).into_iter().all(|i| !is_less(&*begin.add(i + 1), &*begin.add(i)))
+})]
+#[ensures(|_| {
+    let len = tail.addr() - begin.addr();
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    (0..len).into_iter().all(|i| !is_less(&*begin.add(i + 1), &*begin.add(i)))
+})]
 unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T, is_less: &mut F) {
     // SAFETY: see individual comments.
     unsafe {
@@ -556,7 +575,13 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
         let tmp = ManuallyDrop::new(tail.read());
         let mut gap_guard = CopyOnDrop { src: &*tmp, dst: tail, len: 1 };
 
-        loop {
+        #[safety::loop_invariant(
+            sift.addr() >= begin.addr() && sift.addr() < tail.addr()
+        )]
+        // FIXME: This should be `loop` but kani's loop contract doesn't support `loop`.
+        // Once it is supported, replace `while true` with the original `loop`.
+        #[allow(while_true)]
+        while true {
             // SAFETY: we move sift into the gap (which is valid), and point the
             // gap guard destination at sift, ensuring that if a panic occurs the
             // gap is once again filled.
@@ -577,6 +602,14 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
     }
 }
 
 /// Sort `v` assuming `v[..offset]` is already sorted.
+#[requires(offset != 0 && offset <= v.len() && {
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    v[..offset].is_sorted_by(|a, b| !is_less(b, a))
+})]
+#[ensures(|_| {
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    v.is_sorted_by(|a, b| !is_less(b, a))
+})]
 pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
     v: &mut [T],
     offset: usize,
@@ -596,6 +629,9 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
         let v_base = v.as_mut_ptr();
         let v_end = v_base.add(len);
         let mut tail = v_base.add(offset);
+        #[safety::loop_invariant(
+            tail.addr() > v_base.addr() && tail.addr() <= v_end.addr()
+        )]
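+        // NOTE (clarifying sketch, not load-bearing for the proof): with loop
+        // contracts, Kani havocs the loop state, assumes the invariant above,
+        // and re-checks it after one iteration. The `insert_tail` call below may
+        // therefore only rely on `v_base < tail <= v_end`, which together with
+        // `tail != v_end` is exactly its `begin < tail` precondition. For
+        // `len == 3` and `offset == 1`, for example, `tail` only ever takes the
+        // values `v_base.add(1)`, `v_base.add(2)` and `v_base.add(3)`.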
         while tail != v_end {
             // SAFETY: v_base and tail are both valid pointers to elements, and
             // v_base < tail since we checked offset != 0.
@@ -609,6 +645,27 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
 
 /// SAFETY: The caller MUST guarantee that `v_base` is valid for 4 reads and
 /// `dst` is valid for 4 writes. The result will be stored in `dst[0..4]`.
+#[requires(
+    (0..4).into_iter().all(|i| {
+        let p = v_base.add(i);
+        ub_checks::can_dereference(p) &&
+        ub_checks::same_allocation(v_base, p)
+    })
+)]
+#[requires(
+    (0..4).into_iter().all(|i| {
+        let p = dst.add(i);
+        ub_checks::can_write(p) &&
+        ub_checks::same_allocation(dst, p)
+    })
+)]
+#[ensures(|_| {
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    (0..3).into_iter().all(|i| !is_less(
+        &*dst.add(i + 1),
+        &*dst.add(i),
+    ))
+})]
 pub unsafe fn sort4_stable<T, F: FnMut(&T, &T) -> bool>(
     v_base: *const T,
     dst: *mut T,
@@ -870,3 +927,126 @@ pub(crate) const fn has_efficient_in_place_swap<T>() -> bool {
     // Heuristic that holds true on all tested 64-bit capable architectures.
     mem::size_of::<T>() <= 8 // mem::size_of::<u64>()
 }
+
+#[cfg(kani)]
+#[unstable(feature = "kani", issue = "none")]
+mod verify {
+    use super::*;
+
+    // The maximum length of the slice that `insertion_sort_shift_left` is
+    // called on. The value comes from the following line:
+    // https://github.com/model-checking/verify-rust-std/blob/1a38674ad6753e3a78e0181d1fe613f3b25ebacd/library/core/src/slice/sort/shared/smallsort.rs#L330
+    const INSERTION_SORT_MAX_LEN: usize = 17;
+
+    #[kani::proof]
+    pub fn check_swap_if_less() {
+        let mut array: [u8; SMALL_SORT_GENERAL_THRESHOLD] = kani::any();
+        let a_pos = kani::any_where(|x: &usize| *x < SMALL_SORT_GENERAL_THRESHOLD);
+        let b_pos = kani::any_where(|x: &usize| *x < SMALL_SORT_GENERAL_THRESHOLD);
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        let expected = {
+            let mut array = array.clone();
+            let a: u8 = array[a_pos];
+            let b: u8 = array[b_pos];
+            if is_less(&b, &a) {
+                array[a_pos] = b;
+                array[b_pos] = a;
+            }
+            array
+        };
+        unsafe {
+            swap_if_less(array.as_mut_ptr(), a_pos, b_pos, &mut is_less);
+        }
+        kani::assert(
+            array == expected,
+            "Swapped array does not match the expected result",
+        );
+    }
+
+    // FIXME: Ideally, this should be `proof_for_contract(insert_tail)`,
+    // but `CopyOnDrop` inside `insert_tail`'s function body causes the
+    // following false-positive check failure:
+    //
+    // - Check that array_replace((const void *)(char *)dst, ...) is allowed by the assigns clause
+    //
+    // See https://github.com/model-checking/kani/issues/3798
+    #[kani::proof]
+    #[kani::unwind(17)]
+    pub fn check_insert_tail() {
+        let tail = kani::any_where(|x: &usize| *x != 0 && *x < INSERTION_SORT_MAX_LEN);
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        let mut array = kani::any_where(|x: &[u8; INSERTION_SORT_MAX_LEN]| {
+            x[..tail].is_sorted_by(|a, b| !is_less(b, a))
+        });
+        unsafe {
+            let begin = array.as_mut_ptr();
+            let tail = begin.add(tail);
+            insert_tail(begin, tail, &mut is_less);
+        }
+        kani::assert(
+            array[..=tail].is_sorted_by(|a, b| !is_less(b, a)),
+            "Slice is not sorted",
+        );
+    }
+
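+    // Once the `CopyOnDrop` false positive (kani#3798 above) is fixed, the
+    // harness above could shrink to a contract proof along the lines of the
+    // sketch below (hypothetical harness name, kept commented out; the
+    // sortedness precondition would then be assumed from `insert_tail`'s own
+    // `#[requires]` rather than constructed with `any_where`):
+    //
+    // #[kani::proof_for_contract(insert_tail)]
+    // #[kani::unwind(17)]
+    // pub fn check_insert_tail_contract() {
+    //     let mut array: [u8; INSERTION_SORT_MAX_LEN] = kani::any();
+    //     let tail = kani::any_where(|x: &usize| *x != 0 && *x < INSERTION_SORT_MAX_LEN);
+    //     let mut is_less = |x: &u8, y: &u8| x < y;
+    //     unsafe {
+    //         let begin = array.as_mut_ptr();
+    //         insert_tail(begin, begin.add(tail), &mut is_less);
+    //     }
+    // }
+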
+    // FIXME: Ideally, this should be `proof_for_contract(insertion_sort_shift_left)`,
+    // but this fails with OOM due to `proof_for_contract`'s perf issue.
+    //
+    // See https://github.com/model-checking/kani/issues/3797
+    #[kani::proof]
+    #[kani::stub_verified(insert_tail)]
+    #[kani::unwind(17)]
+    pub fn check_insertion_sort_shift_left() {
+        let slice_len = kani::any_where(|x: &usize| {
+            *x != 0 && *x <= INSERTION_SORT_MAX_LEN
+        });
+        let offset = kani::any_where(|x: &usize| *x != 0 && *x <= slice_len);
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        let mut array = kani::any_where(|x: &[u8; INSERTION_SORT_MAX_LEN]| {
+            x[..offset].is_sorted_by(|a, b| !is_less(b, a))
+        });
+        insertion_sort_shift_left(&mut array[..slice_len], offset, &mut is_less);
+        kani::assert(
+            array[..slice_len].is_sorted_by(|a, b| !is_less(b, a)),
+            "Slice is not sorted",
+        );
+    }
+
+    #[kani::proof_for_contract(sort4_stable)]
+    pub fn check_sort4_stable() {
+        let src: [u8; 4] = kani::any();
+        let mut dst = MaybeUninit::<[u8; 4]>::uninit();
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        unsafe {
+            sort4_stable(src.as_ptr(), dst.as_mut_ptr() as *mut _, &mut is_less);
+        }
+    }
+
+    #[kani::proof]
+    pub fn check_sort4_stable_stability() {
+        let src: [(u8, u8); 4] = [
+            (kani::any(), 0),
+            (kani::any(), 1),
+            (kani::any(), 2),
+            (kani::any(), 3),
+        ];
+        let mut dst = MaybeUninit::<[(u8, u8); 4]>::uninit();
+        let mut is_less = |x: &(u8, u8), y: &(u8, u8)| x.0 < y.0;
+        unsafe {
+            sort4_stable(src.as_ptr(), dst.as_mut_ptr() as *mut _, &mut is_less);
+        }
+        let dst = unsafe { dst.assume_init() };
+        let mut is_stably_less = |x: &(u8, u8), y: &(u8, u8)| {
+            if x.0 == y.0 {
+                x.1 < y.1
+            } else {
+                x.0 < y.0
+            }
+        };
+        kani::assert(
+            dst.is_sorted_by(|a, b| !is_stably_less(b, a)),
+            "Slice is not stably sorted",
+        );
+    }
+}
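+
+// NOTE: assuming the usual Kani workflow for this repository, an individual
+// harness can be exercised with something like
+// `cargo kani -Z function-contracts -Z loop-contracts --harness check_insert_tail`;
+// function contracts and loop invariants are unstable Kani features behind `-Z`
+// flags, and the exact invocation (or wrapper script) may differ.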