Rollup merge of #145137 - Kmeakin:km/optimize-slice-index-panicking, r=jhpratt

Consolidate panicking functions in `slice/index.rs`

Consolidate all the panicking functions in `slice/index.rs` to use a single `slice_index_fail` function, similar to how it is done in `str/traits.rs`.

Split off from https://github.com/rust-lang/rust/pull/145024
This commit is contained in:
Jacob Pratt 2025-08-21 17:57:51 -04:00 committed by GitHub
commit 02deabb779
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 269 additions and 137 deletions

View file

@@ -34,53 +34,44 @@ where
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[track_caller]
const fn slice_start_index_len_fail(index: usize, len: usize) -> ! {
const_panic!(
"slice start index is out of range for slice",
"range start index {index} out of range for slice of length {len}",
index: usize,
len: usize,
)
}
const fn slice_index_fail(start: usize, end: usize, len: usize) -> ! {
if start > len {
const_panic!(
"slice start index is out of range for slice",
"range start index {start} out of range for slice of length {len}",
start: usize,
len: usize,
)
}
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[track_caller]
const fn slice_end_index_len_fail(index: usize, len: usize) -> ! {
if end > len {
const_panic!(
"slice end index is out of range for slice",
"range end index {end} out of range for slice of length {len}",
end: usize,
len: usize,
)
}
if start > end {
const_panic!(
"slice index start is larger than end",
"slice index starts at {start} but ends at {end}",
start: usize,
end: usize,
)
}
// Only reachable if the range was a `RangeInclusive` or a
// `RangeToInclusive`, with `end == len`.
const_panic!(
"slice end index is out of range for slice",
"range end index {index} out of range for slice of length {len}",
index: usize,
"range end index {end} out of range for slice of length {len}",
end: usize,
len: usize,
)
}
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[track_caller]
const fn slice_index_order_fail(index: usize, end: usize) -> ! {
const_panic!(
"slice index start is larger than end",
"slice index starts at {index} but ends at {end}",
index: usize,
end: usize,
)
}
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[track_caller]
const fn slice_start_index_overflow_fail() -> ! {
panic!("attempted to index slice from after maximum usize");
}
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[track_caller]
const fn slice_end_index_overflow_fail() -> ! {
panic!("attempted to index slice up to maximum usize");
}
// The UbChecks are great for catching bugs in the unsafe methods, but including
// them in safe indexing is unnecessary and hurts inlining and debug runtime perf.
// Both the safe and unsafe public methods share these helpers,
@@ -341,7 +332,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::IndexRange {
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &*get_offset_len_noubcheck(slice, self.start(), self.len()) }
} else {
slice_end_index_len_fail(self.end(), slice.len())
slice_index_fail(self.start(), self.end(), slice.len())
}
}
@@ -351,7 +342,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::IndexRange {
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &mut *get_offset_len_mut_noubcheck(slice, self.start(), self.len()) }
} else {
slice_end_index_len_fail(self.end(), slice.len())
slice_index_fail(self.start(), self.end(), slice.len())
}
}
}
@@ -436,26 +427,27 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
#[inline(always)]
fn index(self, slice: &[T]) -> &[T] {
// Using checked_sub is a safe way to get `SubUnchecked` in MIR
let Some(new_len) = usize::checked_sub(self.end, self.start) else {
slice_index_order_fail(self.start, self.end)
};
if self.end > slice.len() {
slice_end_index_len_fail(self.end, slice.len());
if let Some(new_len) = usize::checked_sub(self.end, self.start)
&& self.end <= slice.len()
{
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &*get_offset_len_noubcheck(slice, self.start, new_len) }
} else {
slice_index_fail(self.start, self.end, slice.len())
}
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &*get_offset_len_noubcheck(slice, self.start, new_len) }
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
let Some(new_len) = usize::checked_sub(self.end, self.start) else {
slice_index_order_fail(self.start, self.end)
};
if self.end > slice.len() {
slice_end_index_len_fail(self.end, slice.len());
// Using checked_sub is a safe way to get `SubUnchecked` in MIR
if let Some(new_len) = usize::checked_sub(self.end, self.start)
&& self.end <= slice.len()
{
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &mut *get_offset_len_mut_noubcheck(slice, self.start, new_len) }
} else {
slice_index_fail(self.start, self.end, slice.len())
}
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &mut *get_offset_len_mut_noubcheck(slice, self.start, new_len) }
}
}
@@ -567,7 +559,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeFrom<usize> {
#[inline]
fn index(self, slice: &[T]) -> &[T] {
if self.start > slice.len() {
slice_start_index_len_fail(self.start, slice.len());
slice_index_fail(self.start, slice.len(), slice.len())
}
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &*self.get_unchecked(slice) }
@@ -576,7 +568,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeFrom<usize> {
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
if self.start > slice.len() {
slice_start_index_len_fail(self.start, slice.len());
slice_index_fail(self.start, slice.len(), slice.len())
}
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { &mut *self.get_unchecked_mut(slice) }
@@ -690,18 +682,32 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeInclusive<usize> {
#[inline]
fn index(self, slice: &[T]) -> &[T] {
if *self.end() == usize::MAX {
slice_end_index_overflow_fail();
let Self { mut start, mut end, exhausted } = self;
let len = slice.len();
if end < len {
end = end + 1;
start = if exhausted { end } else { start };
if let Some(new_len) = usize::checked_sub(end, start) {
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { return &*get_offset_len_noubcheck(slice, start, new_len) }
}
}
self.into_slice_range().index(slice)
slice_index_fail(start, end, slice.len())
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
if *self.end() == usize::MAX {
slice_end_index_overflow_fail();
let Self { mut start, mut end, exhausted } = self;
let len = slice.len();
if end < len {
end = end + 1;
start = if exhausted { end } else { start };
if let Some(new_len) = usize::checked_sub(end, start) {
// SAFETY: `self` is checked to be valid and in bounds above.
unsafe { return &mut *get_offset_len_mut_noubcheck(slice, start, new_len) }
}
}
self.into_slice_range().index_mut(slice)
slice_index_fail(start, end, slice.len())
}
}
@@ -852,28 +858,26 @@ where
{
let len = bounds.end;
let start = match range.start_bound() {
ops::Bound::Included(&start) => start,
ops::Bound::Excluded(start) => {
start.checked_add(1).unwrap_or_else(|| slice_start_index_overflow_fail())
}
ops::Bound::Unbounded => 0,
};
let end = match range.end_bound() {
ops::Bound::Included(end) => {
end.checked_add(1).unwrap_or_else(|| slice_end_index_overflow_fail())
}
ops::Bound::Included(&end) if end >= len => slice_index_fail(0, end, len),
// Cannot overflow because `end < len` implies `end < usize::MAX`.
ops::Bound::Included(&end) => end + 1,
ops::Bound::Excluded(&end) if end > len => slice_index_fail(0, end, len),
ops::Bound::Excluded(&end) => end,
ops::Bound::Unbounded => len,
};
if start > end {
slice_index_order_fail(start, end);
}
if end > len {
slice_end_index_len_fail(end, len);
}
let start = match range.start_bound() {
ops::Bound::Excluded(&start) if start >= end => slice_index_fail(start, end, len),
// Cannot overflow because `start < end` implies `start < usize::MAX`.
ops::Bound::Excluded(&start) => start + 1,
ops::Bound::Included(&start) if start > end => slice_index_fail(start, end, len),
ops::Bound::Included(&start) => start,
ops::Bound::Unbounded => 0,
};
ops::Range { start, end }
}
@@ -982,25 +986,27 @@ pub(crate) fn into_slice_range(
len: usize,
(start, end): (ops::Bound<usize>, ops::Bound<usize>),
) -> ops::Range<usize> {
use ops::Bound;
let start = match start {
Bound::Included(start) => start,
Bound::Excluded(start) => {
start.checked_add(1).unwrap_or_else(|| slice_start_index_overflow_fail())
}
Bound::Unbounded => 0,
};
let end = match end {
Bound::Included(end) => {
end.checked_add(1).unwrap_or_else(|| slice_end_index_overflow_fail())
}
Bound::Excluded(end) => end,
Bound::Unbounded => len,
ops::Bound::Included(end) if end >= len => slice_index_fail(0, end, len),
// Cannot overflow because `end < len` implies `end < usize::MAX`.
ops::Bound::Included(end) => end + 1,
ops::Bound::Excluded(end) if end > len => slice_index_fail(0, end, len),
ops::Bound::Excluded(end) => end,
ops::Bound::Unbounded => len,
};
// Don't bother with checking `start < end` and `end <= len`
// since these checks are handled by `Range` impls
let start = match start {
ops::Bound::Excluded(start) if start >= end => slice_index_fail(start, end, len),
// Cannot overflow because `start < end` implies `start < usize::MAX`.
ops::Bound::Excluded(start) => start + 1,
ops::Bound::Included(start) if start > end => slice_index_fail(start, end, len),
ops::Bound::Included(start) => start,
ops::Bound::Unbounded => 0,
};
start..end
}

View file

@@ -1492,28 +1492,28 @@ mod slice_index {
// note: using 0 specifically ensures that the result of overflowing is 0..0,
// so that `get` doesn't simply return None for the wrong reason.
bad: data[0 ..= usize::MAX];
message: "maximum usize";
message: "out of range";
}
in mod rangetoinclusive_overflow {
data: [0, 1];
bad: data[..= usize::MAX];
message: "maximum usize";
message: "out of range";
}
in mod boundpair_overflow_end {
data: [0; 1];
bad: data[(Bound::Unbounded, Bound::Included(usize::MAX))];
message: "maximum usize";
message: "out of range";
}
in mod boundpair_overflow_start {
data: [0; 1];
bad: data[(Bound::Excluded(usize::MAX), Bound::Unbounded)];
message: "maximum usize";
message: "out of range";
}
} // panic_cases!
}
@@ -2008,7 +2008,7 @@ fn test_copy_within_panics_src_inverted() {
bytes.copy_within(2..1, 0);
}
#[test]
#[should_panic(expected = "attempted to index slice up to maximum usize")]
#[should_panic(expected = "out of range")]
fn test_copy_within_panics_src_out_of_bounds() {
let mut bytes = *b"Hello, World!";
// an inclusive range ending at usize::MAX would make src_end overflow

View file

@@ -1,5 +1,5 @@
thread 'main' ($TID) panicked at tests/panic/oob_subslice.rs:LL:CC:
range end index 5 out of range for slice of length 4
range end index 4 out of range for slice of length 4
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
note: in Miri, you may have to set `MIRIFLAGS=-Zmiri-env-forward=RUST_BACKTRACE` for the environment variable to have an effect

View file

@@ -8,8 +8,7 @@
#[no_mangle]
pub fn binary_search_index_no_bounds_check(s: &[u8]) -> u8 {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
if let Ok(idx) = s.binary_search(&b'\\') { s[idx] } else { 42 }
}

View file

@@ -10,7 +10,7 @@ pub struct S1<'a> {
// CHECK-LABEL: @slice_no_index_order
#[no_mangle]
pub fn slice_no_index_order<'a>(s: &'a mut S1, n: usize) -> &'a [u8] {
// CHECK-NOT: slice_index_order_fail
// CHECK-COUNT-1: slice_index_fail
let d = &s.data[s.position..s.position + n];
s.position += n;
return d;
@@ -19,6 +19,6 @@ pub fn slice_no_index_order<'a>(s: &'a mut S1, n: usize) -> &'a [u8] {
// CHECK-LABEL: @test_check
#[no_mangle]
pub fn test_check<'a>(s: &'a mut S1, x: usize, y: usize) -> &'a [u8] {
// CHECK: slice_index_order_fail
// CHECK-COUNT-1: slice_index_fail
&s.data[x..y]
}

View file

@@ -5,7 +5,7 @@
use std::cmp::max;
// CHECK-LABEL: @foo
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: unreachable
#[no_mangle]
pub fn foo(v: &mut Vec<u8>, size: usize) -> Option<&mut [u8]> {

View file

@@ -6,7 +6,7 @@
#[no_mangle]
pub fn trim_in_place(a: &mut &[u8]) {
while a.first() == Some(&42) {
// CHECK-NOT: slice_index_order_fail
// CHECK-NOT: slice_index_fail
*a = &a[1..];
}
}
@@ -15,7 +15,7 @@ pub fn trim_in_place(a: &mut &[u8]) {
#[no_mangle]
pub fn trim_in_place2(a: &mut &[u8]) {
while let Some(&42) = a.first() {
// CHECK-NOT: slice_index_order_fail
// CHECK-COUNT-1: slice_index_fail
*a = &a[2..];
}
}

View file

@@ -10,7 +10,7 @@
// CHECK-LABEL: @already_sliced_no_bounds_check
#[no_mangle]
pub fn already_sliced_no_bounds_check(a: &[u8], b: &[u8], c: &mut [u8]) {
// CHECK: slice_end_index_len_fail
// CHECK: slice_index_fail
// CHECK-NOT: panic_bounds_check
let _ = (&a[..2048], &b[..2048], &mut c[..2048]);
for i in 0..1024 {
@@ -21,7 +21,7 @@ pub fn already_sliced_no_bounds_check(a: &[u8], b: &[u8], c: &mut [u8]) {
// CHECK-LABEL: @already_sliced_no_bounds_check_exact
#[no_mangle]
pub fn already_sliced_no_bounds_check_exact(a: &[u8], b: &[u8], c: &mut [u8]) {
// CHECK: slice_end_index_len_fail
// CHECK: slice_index_fail
// CHECK-NOT: panic_bounds_check
let _ = (&a[..1024], &b[..1024], &mut c[..1024]);
for i in 0..1024 {
@@ -33,7 +33,7 @@ pub fn already_sliced_no_bounds_check_exact(a: &[u8], b: &[u8], c: &mut [u8]) {
// CHECK-LABEL: @already_sliced_bounds_check
#[no_mangle]
pub fn already_sliced_bounds_check(a: &[u8], b: &[u8], c: &mut [u8]) {
// CHECK: slice_end_index_len_fail
// CHECK: slice_index_fail
// CHECK: panic_bounds_check
let _ = (&a[..1023], &b[..2048], &mut c[..2048]);
for i in 0..1024 {

View file

@@ -8,8 +8,7 @@
#[no_mangle]
pub fn position_slice_to_no_bounds_check(s: &[u8]) -> &[u8] {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: unreachable
if let Some(idx) = s.iter().position(|b| *b == b'\\') { &s[..idx] } else { s }
@@ -19,8 +18,7 @@ pub fn position_slice_to_no_bounds_check(s: &[u8]) -> &[u8] {
#[no_mangle]
pub fn position_slice_from_no_bounds_check(s: &[u8]) -> &[u8] {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: unreachable
if let Some(idx) = s.iter().position(|b| *b == b'\\') { &s[idx..] } else { s }
@@ -30,8 +28,7 @@ pub fn position_slice_from_no_bounds_check(s: &[u8]) -> &[u8] {
#[no_mangle]
pub fn position_index_no_bounds_check(s: &[u8]) -> u8 {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: unreachable
if let Some(idx) = s.iter().position(|b| *b == b'\\') { s[idx] } else { 42 }
@@ -40,8 +37,7 @@ pub fn position_index_no_bounds_check(s: &[u8]) -> u8 {
#[no_mangle]
pub fn rposition_slice_to_no_bounds_check(s: &[u8]) -> &[u8] {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: unreachable
if let Some(idx) = s.iter().rposition(|b| *b == b'\\') { &s[..idx] } else { s }
@@ -51,8 +47,7 @@ pub fn rposition_slice_to_no_bounds_check(s: &[u8]) -> &[u8] {
#[no_mangle]
pub fn rposition_slice_from_no_bounds_check(s: &[u8]) -> &[u8] {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: unreachable
if let Some(idx) = s.iter().rposition(|b| *b == b'\\') { &s[idx..] } else { s }
@@ -62,8 +57,7 @@ pub fn rposition_slice_from_no_bounds_check(s: &[u8]) -> &[u8] {
#[no_mangle]
pub fn rposition_index_no_bounds_check(s: &[u8]) -> u8 {
// CHECK-NOT: panic
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: unreachable
if let Some(idx) = s.iter().rposition(|b| *b == b'\\') { s[idx] } else { 42 }

View file

@@ -1,19 +1,18 @@
//@ compile-flags: -Copt-level=3
//@ only-x86_64
//@ min-llvm-version: 20
#![crate_type = "lib"]
// This test verifies that LLVM 20 properly optimizes the bounds check
// when accessing the last few elements of a slice with proper conditions.
// Previously, this would generate an unreachable branch to
// slice_start_index_len_fail even when the bounds check was provably safe.
// slice_index_fail even when the bounds check was provably safe.
// CHECK-LABEL: @last_four_initial(
#[no_mangle]
pub fn last_four_initial(s: &[u8]) -> &[u8] {
// Previously this would generate a branch to slice_start_index_len_fail
// Previously this would generate a branch to slice_index_fail
// that is unreachable. The LLVM 20 fix should eliminate this branch.
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: unreachable
let start = if s.len() <= 4 { 0 } else { s.len() - 4 };
&s[start..]
@@ -23,7 +22,7 @@ pub fn last_four_initial(s: &[u8]) -> &[u8] {
#[no_mangle]
pub fn last_four_optimized(s: &[u8]) -> &[u8] {
// This version was already correctly optimized before the fix in LLVM 20.
// CHECK-NOT: slice_start_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK-NOT: unreachable
if s.len() <= 4 { &s[0..] } else { &s[s.len() - 4..] }
}
@@ -32,6 +31,6 @@ pub fn last_four_optimized(s: &[u8]) -> &[u8] {
// CHECK-LABEL: @test_bounds_check_happens(
#[no_mangle]
pub fn test_bounds_check_happens(s: &[u8], i: usize) -> &[u8] {
// CHECK: slice_start_index_len_fail
// CHECK: slice_index_fail
&s[i..]
}

View file

@@ -8,10 +8,10 @@
#[no_mangle]
pub fn slice_reverse_u8(slice: &mut [u8]) {
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK: shufflevector <{{[0-9]+}} x i8>
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
slice.reverse();
}
@@ -19,9 +19,9 @@ pub fn slice_reverse_u8(slice: &mut [u8]) {
#[no_mangle]
pub fn slice_reverse_i32(slice: &mut [i32]) {
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
// CHECK: shufflevector <{{[0-9]+}} x i32>
// CHECK-NOT: panic_bounds_check
// CHECK-NOT: slice_end_index_len_fail
// CHECK-NOT: slice_index_fail
slice.reverse();
}

View file

@@ -4,14 +4,81 @@ fn slice_index_range(_1: &[u32], _2: std::ops::Range<usize>) -> &[u32] {
debug slice => _1;
debug index => _2;
let mut _0: &[u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined #[track_caller] core::slice::index::<impl Index<std::ops::Range<usize>> for [u32]>::index) {
scope 2 (inlined #[track_caller] <std::ops::Range<usize> as SliceIndex<[u32]>>::index) {
let mut _7: usize;
let mut _8: bool;
let mut _9: *const [u32];
let _12: *const [u32];
let mut _13: usize;
let mut _14: !;
scope 3 (inlined core::num::<impl usize>::checked_sub) {
let mut _5: bool;
let mut _6: usize;
}
scope 4 (inlined core::slice::index::get_offset_len_noubcheck::<u32>) {
let _10: *const u32;
scope 5 {
let _11: *const u32;
scope 6 {
}
}
}
}
}
bb0: {
_0 = <std::ops::Range<usize> as SliceIndex<[u32]>>::index(move _2, move _1) -> [return: bb1, unwind unreachable];
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = Lt(copy _4, copy _3);
switchInt(move _5) -> [0: bb1, otherwise: bb4];
}
bb1: {
_6 = SubUnchecked(copy _4, copy _3);
StorageDead(_5);
StorageLive(_8);
StorageLive(_7);
_7 = PtrMetadata(copy _1);
_8 = Le(copy _4, move _7);
switchInt(move _8) -> [0: bb2, otherwise: bb3];
}
bb2: {
StorageDead(_7);
goto -> bb5;
}
bb3: {
StorageDead(_7);
StorageLive(_12);
StorageLive(_9);
_9 = &raw const (*_1);
StorageLive(_10);
StorageLive(_11);
_10 = copy _9 as *const u32 (PtrToPtr);
_11 = Offset(copy _10, copy _3);
_12 = *const [u32] from (copy _11, copy _6);
StorageDead(_11);
StorageDead(_10);
StorageDead(_9);
_0 = &(*_12);
StorageDead(_12);
StorageDead(_8);
return;
}
bb4: {
StorageDead(_5);
goto -> bb5;
}
bb5: {
StorageLive(_13);
_13 = PtrMetadata(copy _1);
_14 = core::slice::index::slice_index_fail(move _3, move _4, move _13) -> unwind unreachable;
}
}

View file

@@ -4,14 +4,81 @@ fn slice_index_range(_1: &[u32], _2: std::ops::Range<usize>) -> &[u32] {
debug slice => _1;
debug index => _2;
let mut _0: &[u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined #[track_caller] core::slice::index::<impl Index<std::ops::Range<usize>> for [u32]>::index) {
scope 2 (inlined #[track_caller] <std::ops::Range<usize> as SliceIndex<[u32]>>::index) {
let mut _7: usize;
let mut _8: bool;
let mut _9: *const [u32];
let _12: *const [u32];
let mut _13: usize;
let mut _14: !;
scope 3 (inlined core::num::<impl usize>::checked_sub) {
let mut _5: bool;
let mut _6: usize;
}
scope 4 (inlined core::slice::index::get_offset_len_noubcheck::<u32>) {
let _10: *const u32;
scope 5 {
let _11: *const u32;
scope 6 {
}
}
}
}
}
bb0: {
_0 = <std::ops::Range<usize> as SliceIndex<[u32]>>::index(move _2, move _1) -> [return: bb1, unwind continue];
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = Lt(copy _4, copy _3);
switchInt(move _5) -> [0: bb1, otherwise: bb4];
}
bb1: {
_6 = SubUnchecked(copy _4, copy _3);
StorageDead(_5);
StorageLive(_8);
StorageLive(_7);
_7 = PtrMetadata(copy _1);
_8 = Le(copy _4, move _7);
switchInt(move _8) -> [0: bb2, otherwise: bb3];
}
bb2: {
StorageDead(_7);
goto -> bb5;
}
bb3: {
StorageDead(_7);
StorageLive(_12);
StorageLive(_9);
_9 = &raw const (*_1);
StorageLive(_10);
StorageLive(_11);
_10 = copy _9 as *const u32 (PtrToPtr);
_11 = Offset(copy _10, copy _3);
_12 = *const [u32] from (copy _11, copy _6);
StorageDead(_11);
StorageDead(_10);
StorageDead(_9);
_0 = &(*_12);
StorageDead(_12);
StorageDead(_8);
return;
}
bb4: {
StorageDead(_5);
goto -> bb5;
}
bb5: {
StorageLive(_13);
_13 = PtrMetadata(copy _1);
_14 = core::slice::index::slice_index_fail(move _3, move _4, move _13) -> unwind continue;
}
}