Add assert_unsafe_precondition to unchecked_{add,sub,neg,mul,shl,shr} methods

This commit is contained in:
ltdk 2024-04-09 02:01:21 -04:00
parent 9c8a58fdb8
commit 72b7171031
23 changed files with 289 additions and 193 deletions

View file

@@ -191,6 +191,7 @@
#![feature(str_split_remainder)]
#![feature(strict_provenance)]
#![feature(ub_checks)]
#![feature(unchecked_neg)]
#![feature(unchecked_shifts)]
#![feature(utf16_extra)]
#![feature(utf16_extra_const)]

View file

@@ -488,9 +488,19 @@ pub const fn strict_add(self, rhs: Self) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_add(self, rhs: Self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_add`.
unsafe { intrinsics::unchecked_add(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_add cannot overflow"),
(
lhs: $SelfT = self,
rhs: $SelfT = rhs,
) => !lhs.overflowing_add(rhs).1,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_add(self, rhs)
}
}
/// Checked addition with an unsigned integer. Computes `self + rhs`,
@@ -630,9 +640,19 @@ pub const fn strict_sub(self, rhs: Self) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_sub`.
unsafe { intrinsics::unchecked_sub(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_sub cannot overflow"),
(
lhs: $SelfT = self,
rhs: $SelfT = rhs,
) => !lhs.overflowing_sub(rhs).1,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_sub(self, rhs)
}
}
/// Checked subtraction with an unsigned integer. Computes `self - rhs`,
@@ -772,9 +792,19 @@ pub const fn strict_mul(self, rhs: Self) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_mul`.
unsafe { intrinsics::unchecked_mul(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_mul cannot overflow"),
(
lhs: $SelfT = self,
rhs: $SelfT = rhs,
) => !lhs.overflowing_mul(rhs).1,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_mul(self, rhs)
}
}
/// Checked integer division. Computes `self / rhs`, returning `None` if `rhs == 0`
@@ -1111,9 +1141,22 @@ pub const fn checked_neg(self) -> Option<Self> {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_neg(self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_neg`.
unsafe { intrinsics::unchecked_sub(0, self) }
// ICE resolved by #125184 isn't in bootstrap compiler
#[cfg(not(bootstrap))]
{
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_neg cannot overflow"),
(
lhs: $SelfT = self,
) => !lhs.overflowing_neg().1,
);
}
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_sub(0, self)
}
}
/// Strict negation. Computes `-self`, panicking if `self == MIN`.
@@ -1234,9 +1277,19 @@ pub const fn strict_shl(self, rhs: u32) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shl`.
unsafe { intrinsics::unchecked_shl(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_shl cannot overflow"),
(
rhs: u32 = rhs,
bits: u32 = Self::BITS,
) => rhs < bits,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_shl(self, rhs)
}
}
/// Checked shift right. Computes `self >> rhs`, returning `None` if `rhs` is
@@ -1323,9 +1376,19 @@ pub const fn strict_shr(self, rhs: u32) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shr`.
unsafe { intrinsics::unchecked_shr(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_shr cannot overflow"),
(
rhs: u32 = rhs,
bits: u32 = Self::BITS,
) => rhs < bits,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_shr(self, rhs)
}
}
/// Checked absolute value. Computes `self.abs()`, returning `None` if

View file

@@ -7,6 +7,7 @@
use crate::intrinsics;
use crate::mem;
use crate::str::FromStr;
use crate::ub_checks::assert_unsafe_precondition;
// Used because the `?` operator is not allowed in a const context.
macro_rules! try_opt {

View file

@@ -495,9 +495,19 @@ pub const fn strict_add(self, rhs: Self) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_add(self, rhs: Self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_add`.
unsafe { intrinsics::unchecked_add(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_add cannot overflow"),
(
lhs: $SelfT = self,
rhs: $SelfT = rhs,
) => !lhs.overflowing_add(rhs).1,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_add(self, rhs)
}
}
/// Checked addition with a signed integer. Computes `self + rhs`,
@@ -677,9 +687,19 @@ pub const fn strict_sub(self, rhs: Self) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_sub`.
unsafe { intrinsics::unchecked_sub(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_sub cannot overflow"),
(
lhs: $SelfT = self,
rhs: $SelfT = rhs,
) => !lhs.overflowing_sub(rhs).1,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_sub(self, rhs)
}
}
/// Checked integer multiplication. Computes `self * rhs`, returning
@@ -763,9 +783,19 @@ pub const fn strict_mul(self, rhs: Self) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_mul`.
unsafe { intrinsics::unchecked_mul(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_mul cannot overflow"),
(
lhs: $SelfT = self,
rhs: $SelfT = rhs,
) => !lhs.overflowing_mul(rhs).1,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_mul(self, rhs)
}
}
/// Checked integer division. Computes `self / rhs`, returning `None`
@@ -1334,9 +1364,19 @@ pub const fn strict_shl(self, rhs: u32) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shl`.
unsafe { intrinsics::unchecked_shl(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_shl cannot overflow"),
(
rhs: u32 = rhs,
bits: u32 = Self::BITS,
) => rhs < bits,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_shl(self, rhs)
}
}
/// Checked shift right. Computes `self >> rhs`, returning `None`
@@ -1423,9 +1463,19 @@ pub const fn strict_shr(self, rhs: u32) -> Self {
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shr`.
unsafe { intrinsics::unchecked_shr(self, rhs) }
assert_unsafe_precondition!(
check_language_ub,
concat!(stringify!($SelfT), "::unchecked_shr cannot overflow"),
(
rhs: u32 = rhs,
bits: u32 = Self::BITS,
) => rhs < bits,
);
// SAFETY: this is guaranteed to be safe by the caller.
unsafe {
intrinsics::unchecked_shr(self, rhs)
}
}
/// Checked exponentiation. Computes `self.pow(exp)`, returning `None` if

View file

@@ -1,4 +1,3 @@
use crate::intrinsics::{unchecked_add, unchecked_sub};
use crate::iter::{FusedIterator, TrustedLen};
use crate::num::NonZero;
use crate::ub_checks;
@@ -46,7 +45,7 @@ pub const fn end(&self) -> usize {
#[inline]
pub const fn len(&self) -> usize {
// SAFETY: By invariant, this cannot wrap
unsafe { unchecked_sub(self.end, self.start) }
unsafe { self.end.unchecked_sub(self.start) }
}
/// # Safety
@@ -57,7 +56,7 @@ unsafe fn next_unchecked(&mut self) -> usize {
let value = self.start;
// SAFETY: The range isn't empty, so this cannot overflow
self.start = unsafe { unchecked_add(value, 1) };
self.start = unsafe { value.unchecked_add(1) };
value
}
@@ -68,7 +67,7 @@ unsafe fn next_back_unchecked(&mut self) -> usize {
debug_assert!(self.start < self.end);
// SAFETY: The range isn't empty, so this cannot overflow
let value = unsafe { unchecked_sub(self.end, 1) };
let value = unsafe { self.end.unchecked_sub(1) };
self.end = value;
value
}
@@ -83,7 +82,7 @@ pub fn take_prefix(&mut self, n: usize) -> Self {
let mid = if n <= self.len() {
// SAFETY: We just checked that this will be between start and end,
// and thus the addition cannot overflow.
unsafe { unchecked_add(self.start, n) }
unsafe { self.start.unchecked_add(n) }
} else {
self.end
};
@@ -102,7 +101,7 @@ pub fn take_suffix(&mut self, n: usize) -> Self {
let mid = if n <= self.len() {
// SAFETY: We just checked that this will be between start and end,
// and thus the addition cannot overflow.
unsafe { unchecked_sub(self.end, n) }
unsafe { self.end.unchecked_sub(n) }
} else {
self.start
};

View file

@@ -1080,6 +1080,7 @@ pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
#[rustc_allow_const_fn_unstable(unchecked_neg)]
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn sub(self, count: usize) -> Self
@@ -1093,7 +1094,7 @@ pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
// SAFETY: the caller must uphold the safety contract for `offset`.
// Because the pointee is *not* a ZST, that means that `count` is
// at most `isize::MAX`, and thus the negation cannot overflow.
unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) }
unsafe { self.offset((count as isize).unchecked_neg()) }
}
}

View file

@@ -1224,6 +1224,7 @@ pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
#[rustc_allow_const_fn_unstable(unchecked_neg)]
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn sub(self, count: usize) -> Self
@@ -1237,7 +1238,7 @@ pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
// SAFETY: the caller must uphold the safety contract for `offset`.
// Because the pointee is *not* a ZST, that means that `count` is
// at most `isize::MAX`, and thus the negation cannot overflow.
unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) }
unsafe { self.offset((count as isize).unchecked_neg()) }
}
}

View file

@@ -701,6 +701,7 @@ pub const fn cast<U>(self) -> NonNull<U> {
#[must_use = "returns a new pointer rather than modifying its argument"]
#[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")]
#[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")]
#[rustc_allow_const_fn_unstable(unchecked_neg)]
pub const unsafe fn sub(self, count: usize) -> Self
where
T: Sized,
@@ -712,7 +713,7 @@ pub const fn cast<U>(self) -> NonNull<U> {
// SAFETY: the caller must uphold the safety contract for `offset`.
// Because the pointee is *not* a ZST, that means that `count` is
// at most `isize::MAX`, and thus the negation cannot overflow.
unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) }
unsafe { self.offset((count as isize).unchecked_neg()) }
}
}

View file

@@ -1,7 +1,6 @@
//! Indexing implementations for `[T]`.
use crate::intrinsics::const_eval_select;
use crate::intrinsics::unchecked_sub;
use crate::ops;
use crate::ptr;
use crate::ub_checks::assert_unsafe_precondition;
@@ -374,7 +373,7 @@ unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
// so the call to `add` is safe and the length calculation cannot overflow.
unsafe {
let new_len = unchecked_sub(self.end, self.start);
let new_len = self.end.unchecked_sub(self.start);
ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), new_len)
}
}
@@ -392,7 +391,7 @@ unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
);
// SAFETY: see comments for `get_unchecked` above.
unsafe {
let new_len = unchecked_sub(self.end, self.start);
let new_len = self.end.unchecked_sub(self.start);
ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), new_len)
}
}

View file

@@ -8,6 +8,12 @@
let mut _3: u16;
let mut _4: u32;
+ scope 1 (inlined core::num::<impl u16>::unchecked_shl) {
+ let mut _5: bool;
+ let _6: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
bb0: {
@@ -16,10 +22,20 @@
StorageLive(_4);
_4 = _2;
- _0 = core::num::<impl u16>::unchecked_shl(move _3, move _4) -> [return: bb1, unwind unreachable];
- }
-
- bb1: {
+ StorageLive(_6);
+ StorageLive(_5);
+ _5 = UbChecks();
+ switchInt(move _5) -> [0: bb2, otherwise: bb1];
}
bb1: {
+ _6 = core::num::<impl u16>::unchecked_shl::precondition_check(_4, const core::num::<impl u16>::BITS) -> [return: bb2, unwind unreachable];
+ }
+
+ bb2: {
+ StorageDead(_5);
+ _0 = ShlUnchecked(_3, _4);
+ StorageDead(_6);
StorageDead(_4);
StorageDead(_3);
return;

View file

@@ -8,6 +8,12 @@
let mut _3: u16;
let mut _4: u32;
+ scope 1 (inlined core::num::<impl u16>::unchecked_shl) {
+ let mut _5: bool;
+ let _6: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
bb0: {
@@ -16,10 +22,20 @@
StorageLive(_4);
_4 = _2;
- _0 = core::num::<impl u16>::unchecked_shl(move _3, move _4) -> [return: bb1, unwind continue];
- }
-
- bb1: {
+ StorageLive(_6);
+ StorageLive(_5);
+ _5 = UbChecks();
+ switchInt(move _5) -> [0: bb2, otherwise: bb1];
}
bb1: {
+ _6 = core::num::<impl u16>::unchecked_shl::precondition_check(_4, const core::num::<impl u16>::BITS) -> [return: bb2, unwind unreachable];
+ }
+
+ bb2: {
+ StorageDead(_5);
+ _0 = ShlUnchecked(_3, _4);
+ StorageDead(_6);
StorageDead(_4);
StorageDead(_3);
return;

View file

@@ -5,6 +5,10 @@ fn unchecked_shl_unsigned_smaller(_1: u16, _2: u32) -> u16 {
debug b => _2;
let mut _0: u16;
scope 1 (inlined core::num::<impl u16>::unchecked_shl) {
scope 2 (inlined core::ub_checks::check_language_ub) {
scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
}
bb0: {

View file

@@ -5,6 +5,10 @@ fn unchecked_shl_unsigned_smaller(_1: u16, _2: u32) -> u16 {
debug b => _2;
let mut _0: u16;
scope 1 (inlined core::num::<impl u16>::unchecked_shl) {
scope 2 (inlined core::ub_checks::check_language_ub) {
scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
}
bb0: {

View file

@@ -8,6 +8,12 @@
let mut _3: i64;
let mut _4: u32;
+ scope 1 (inlined core::num::<impl i64>::unchecked_shr) {
+ let mut _5: bool;
+ let _6: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
bb0: {
@@ -16,10 +22,20 @@
StorageLive(_4);
_4 = _2;
- _0 = core::num::<impl i64>::unchecked_shr(move _3, move _4) -> [return: bb1, unwind unreachable];
- }
-
- bb1: {
+ StorageLive(_6);
+ StorageLive(_5);
+ _5 = UbChecks();
+ switchInt(move _5) -> [0: bb2, otherwise: bb1];
}
bb1: {
+ _6 = core::num::<impl i64>::unchecked_shr::precondition_check(_4, const core::num::<impl i64>::BITS) -> [return: bb2, unwind unreachable];
+ }
+
+ bb2: {
+ StorageDead(_5);
+ _0 = ShrUnchecked(_3, _4);
+ StorageDead(_6);
StorageDead(_4);
StorageDead(_3);
return;

View file

@@ -8,6 +8,12 @@
let mut _3: i64;
let mut _4: u32;
+ scope 1 (inlined core::num::<impl i64>::unchecked_shr) {
+ let mut _5: bool;
+ let _6: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
bb0: {
@@ -16,10 +22,20 @@
StorageLive(_4);
_4 = _2;
- _0 = core::num::<impl i64>::unchecked_shr(move _3, move _4) -> [return: bb1, unwind continue];
- }
-
- bb1: {
+ StorageLive(_6);
+ StorageLive(_5);
+ _5 = UbChecks();
+ switchInt(move _5) -> [0: bb2, otherwise: bb1];
}
bb1: {
+ _6 = core::num::<impl i64>::unchecked_shr::precondition_check(_4, const core::num::<impl i64>::BITS) -> [return: bb2, unwind unreachable];
+ }
+
+ bb2: {
+ StorageDead(_5);
+ _0 = ShrUnchecked(_3, _4);
+ StorageDead(_6);
StorageDead(_4);
StorageDead(_3);
return;

View file

@@ -5,6 +5,10 @@ fn unchecked_shr_signed_bigger(_1: i64, _2: u32) -> i64 {
debug b => _2;
let mut _0: i64;
scope 1 (inlined core::num::<impl i64>::unchecked_shr) {
scope 2 (inlined core::ub_checks::check_language_ub) {
scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
}
bb0: {

View file

@@ -5,6 +5,10 @@ fn unchecked_shr_signed_bigger(_1: i64, _2: u32) -> i64 {
debug b => _2;
let mut _0: i64;
scope 1 (inlined core::num::<impl i64>::unchecked_shr) {
scope 2 (inlined core::ub_checks::check_language_ub) {
scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
}
bb0: {

View file

@@ -8,6 +8,10 @@ fn checked_shl(_1: u32, _2: u32) -> Option<u32> {
let mut _3: bool;
let mut _4: u32;
scope 2 (inlined core::num::<impl u32>::unchecked_shl) {
scope 3 (inlined core::ub_checks::check_language_ub) {
scope 4 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
}
}

View file

@@ -8,6 +8,10 @@ fn checked_shl(_1: u32, _2: u32) -> Option<u32> {
let mut _3: bool;
let mut _4: u32;
scope 2 (inlined core::num::<impl u32>::unchecked_shl) {
scope 3 (inlined core::ub_checks::check_language_ub) {
scope 4 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
}
}

View file

@@ -4,47 +4,20 @@ fn slice_get_unchecked_mut_range(_1: &mut [u32], _2: std::ops::Range<usize>) ->
debug slice => _1;
debug index => _2;
let mut _0: &mut [u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined core::slice::<impl [u32]>::get_unchecked_mut::<std::ops::Range<usize>>) {
let mut _5: *mut [u32];
let mut _9: *mut [u32];
scope 2 (inlined <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked_mut) {
let _6: usize;
let mut _7: *mut u32;
let mut _8: *mut u32;
scope 3 {
scope 6 (inlined std::ptr::mut_ptr::<impl *mut [u32]>::as_mut_ptr) {
}
scope 7 (inlined std::ptr::mut_ptr::<impl *mut u32>::add) {
}
scope 8 (inlined slice_from_raw_parts_mut::<u32>) {
}
}
scope 4 (inlined std::ptr::mut_ptr::<impl *mut [u32]>::len) {
scope 5 (inlined std::ptr::metadata::<[u32]>) {
}
}
}
let mut _3: *mut [u32];
let mut _4: *mut [u32];
}
bb0: {
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = &raw mut (*_1);
StorageLive(_6);
_6 = SubUnchecked(_4, _3);
StorageLive(_8);
StorageLive(_7);
_7 = _5 as *mut u32 (PtrToPtr);
_8 = Offset(_7, _3);
StorageDead(_7);
_9 = *mut [u32] from (_8, _6);
StorageDead(_8);
StorageDead(_6);
StorageDead(_5);
_0 = &mut (*_9);
StorageLive(_3);
_3 = &raw mut (*_1);
_4 = <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked_mut(move _2, move _3) -> [return: bb1, unwind unreachable];
}
bb1: {
StorageDead(_3);
_0 = &mut (*_4);
return;
}
}

View file

@@ -4,47 +4,20 @@ fn slice_get_unchecked_mut_range(_1: &mut [u32], _2: std::ops::Range<usize>) ->
debug slice => _1;
debug index => _2;
let mut _0: &mut [u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined core::slice::<impl [u32]>::get_unchecked_mut::<std::ops::Range<usize>>) {
let mut _5: *mut [u32];
let mut _9: *mut [u32];
scope 2 (inlined <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked_mut) {
let _6: usize;
let mut _7: *mut u32;
let mut _8: *mut u32;
scope 3 {
scope 6 (inlined std::ptr::mut_ptr::<impl *mut [u32]>::as_mut_ptr) {
}
scope 7 (inlined std::ptr::mut_ptr::<impl *mut u32>::add) {
}
scope 8 (inlined slice_from_raw_parts_mut::<u32>) {
}
}
scope 4 (inlined std::ptr::mut_ptr::<impl *mut [u32]>::len) {
scope 5 (inlined std::ptr::metadata::<[u32]>) {
}
}
}
let mut _3: *mut [u32];
let mut _4: *mut [u32];
}
bb0: {
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = &raw mut (*_1);
StorageLive(_6);
_6 = SubUnchecked(_4, _3);
StorageLive(_8);
StorageLive(_7);
_7 = _5 as *mut u32 (PtrToPtr);
_8 = Offset(_7, _3);
StorageDead(_7);
_9 = *mut [u32] from (_8, _6);
StorageDead(_8);
StorageDead(_6);
StorageDead(_5);
_0 = &mut (*_9);
StorageLive(_3);
_3 = &raw mut (*_1);
_4 = <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked_mut(move _2, move _3) -> [return: bb1, unwind continue];
}
bb1: {
StorageDead(_3);
_0 = &mut (*_4);
return;
}
}

View file

@@ -4,41 +4,14 @@ fn slice_ptr_get_unchecked_range(_1: *const [u32], _2: std::ops::Range<usize>) -
debug slice => _1;
debug index => _2;
let mut _0: *const [u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined std::ptr::const_ptr::<impl *const [u32]>::get_unchecked::<std::ops::Range<usize>>) {
scope 2 (inlined <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked) {
let _5: usize;
let mut _6: *const u32;
let mut _7: *const u32;
scope 3 {
scope 6 (inlined std::ptr::const_ptr::<impl *const [u32]>::as_ptr) {
}
scope 7 (inlined std::ptr::const_ptr::<impl *const u32>::add) {
}
scope 8 (inlined slice_from_raw_parts::<u32>) {
}
}
scope 4 (inlined std::ptr::const_ptr::<impl *const [u32]>::len) {
scope 5 (inlined std::ptr::metadata::<[u32]>) {
}
}
}
}
bb0: {
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = SubUnchecked(_4, _3);
StorageLive(_7);
StorageLive(_6);
_6 = _1 as *const u32 (PtrToPtr);
_7 = Offset(_6, _3);
StorageDead(_6);
_0 = *const [u32] from (_7, _5);
StorageDead(_7);
StorageDead(_5);
_0 = <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked(move _2, move _1) -> [return: bb1, unwind unreachable];
}
bb1: {
return;
}
}

View file

@@ -4,41 +4,14 @@ fn slice_ptr_get_unchecked_range(_1: *const [u32], _2: std::ops::Range<usize>) -
debug slice => _1;
debug index => _2;
let mut _0: *const [u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined std::ptr::const_ptr::<impl *const [u32]>::get_unchecked::<std::ops::Range<usize>>) {
scope 2 (inlined <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked) {
let _5: usize;
let mut _6: *const u32;
let mut _7: *const u32;
scope 3 {
scope 6 (inlined std::ptr::const_ptr::<impl *const [u32]>::as_ptr) {
}
scope 7 (inlined std::ptr::const_ptr::<impl *const u32>::add) {
}
scope 8 (inlined slice_from_raw_parts::<u32>) {
}
}
scope 4 (inlined std::ptr::const_ptr::<impl *const [u32]>::len) {
scope 5 (inlined std::ptr::metadata::<[u32]>) {
}
}
}
}
bb0: {
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = SubUnchecked(_4, _3);
StorageLive(_7);
StorageLive(_6);
_6 = _1 as *const u32 (PtrToPtr);
_7 = Offset(_6, _3);
StorageDead(_6);
_0 = *const [u32] from (_7, _5);
StorageDead(_7);
StorageDead(_5);
_0 = <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked(move _2, move _1) -> [return: bb1, unwind continue];
}
bb1: {
return;
}
}