Auto merge of #147893 - fee1-dead-contrib:constheapheapheap, r=oli-obk
`Vec::push` in consts MVP
Example:
```rust
const X: &'static [u32] = {
let mut v = Vec::with_capacity(6);
let mut x = 1;
while x < 42 {
v.push(x);
x *= 2;
}
assert!(v.len() == 6);
v.const_make_global()
};
assert_eq!([1, 2, 4, 8, 16, 32], X);
```
Oh this is fun...
* We split out the implementation of `Global` so that it calls `intrinsics::const_allocate` and `intrinsics::const_deallocate` at compile time. This is achieved using `const_eval_select`.
* This allows us to `impl const Allocator for Global`
* We then constify everything necessary for `Vec::with_capacity` and `Vec::push`.
* Added `Vec::const_make_global` to leak and intern the final value via `intrinsics::const_make_global`. If the final value of a `const` contains a pointer to an allocation on which `const_make_global` was not called, we emit an error, as implemented in rust-lang/rust#143595.
r? `@rust-lang/wg-const-eval`
To-do for me:
* [x] Assess the rustdoc impact of additional bounds in the method
* [x] ~~Increase test coverage~~ I think this is enough for an unstable feature.
This commit is contained in:
commit
74fd7516da
19 changed files with 669 additions and 421 deletions
|
|
@ -5,8 +5,8 @@
|
|||
#[stable(feature = "alloc_module", since = "1.28.0")]
|
||||
#[doc(inline)]
|
||||
pub use core::alloc::*;
|
||||
use core::hint;
|
||||
use core::ptr::{self, NonNull};
|
||||
use core::{cmp, hint};
|
||||
|
||||
unsafe extern "Rust" {
|
||||
// These are the magic symbols to call the global allocator. rustc generates
|
||||
|
|
@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
|
|||
impl Global {
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
|
||||
fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
|
||||
match layout.size() {
|
||||
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
|
||||
// SAFETY: `layout` is non-zero in size,
|
||||
|
|
@ -194,10 +194,26 @@ impl Global {
|
|||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
|
||||
if layout.size() != 0 {
|
||||
// SAFETY:
|
||||
// * We have checked that `layout` is non-zero in size.
|
||||
// * The caller is obligated to provide a layout that "fits", and in this case,
|
||||
// "fit" always means a layout that is equal to the original, because our
|
||||
// `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
|
||||
// allocation than requested.
|
||||
// * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
|
||||
// safety documentation.
|
||||
unsafe { dealloc(ptr.as_ptr(), layout) }
|
||||
}
|
||||
}
|
||||
|
||||
// SAFETY: Same as `Allocator::grow`
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn grow_impl(
|
||||
fn grow_impl_runtime(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
|
|
@ -241,69 +257,16 @@ impl Global {
|
|||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
unsafe impl Allocator for Global {
|
||||
// SAFETY: Same as `Allocator::grow`
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
|
||||
self.alloc_impl(layout, false)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
|
||||
self.alloc_impl(layout, true)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
|
||||
if layout.size() != 0 {
|
||||
// SAFETY:
|
||||
// * We have checked that `layout` is non-zero in size.
|
||||
// * The caller is obligated to provide a layout that "fits", and in this case,
|
||||
// "fit" always means a layout that is equal to the original, because our
|
||||
// `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
|
||||
// allocation than requested.
|
||||
// * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
|
||||
// safety documentation.
|
||||
unsafe { dealloc(ptr.as_ptr(), layout) }
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn grow(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
// SAFETY: all conditions must be upheld by the caller
|
||||
unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn grow_zeroed(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
// SAFETY: all conditions must be upheld by the caller
|
||||
unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn shrink(
|
||||
fn shrink_impl_runtime(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
_zeroed: bool,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
debug_assert!(
|
||||
new_layout.size() <= old_layout.size(),
|
||||
|
|
@ -340,6 +303,180 @@ unsafe impl Allocator for Global {
|
|||
},
|
||||
}
|
||||
}
|
||||
|
||||
// SAFETY: Same as `Allocator::allocate`
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
|
||||
core::intrinsics::const_eval_select(
|
||||
(layout, zeroed),
|
||||
Global::alloc_impl_const,
|
||||
Global::alloc_impl_runtime,
|
||||
)
|
||||
}
|
||||
|
||||
// SAFETY: Same as `Allocator::deallocate`
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
|
||||
core::intrinsics::const_eval_select(
|
||||
(ptr, layout),
|
||||
Global::deallocate_impl_const,
|
||||
Global::deallocate_impl_runtime,
|
||||
)
|
||||
}
|
||||
|
||||
// SAFETY: Same as `Allocator::grow`
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const unsafe fn grow_impl(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
zeroed: bool,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
core::intrinsics::const_eval_select(
|
||||
(self, ptr, old_layout, new_layout, zeroed),
|
||||
Global::grow_shrink_impl_const,
|
||||
Global::grow_impl_runtime,
|
||||
)
|
||||
}
|
||||
|
||||
// SAFETY: Same as `Allocator::shrink`
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const unsafe fn shrink_impl(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
core::intrinsics::const_eval_select(
|
||||
(self, ptr, old_layout, new_layout, false),
|
||||
Global::grow_shrink_impl_const,
|
||||
Global::shrink_impl_runtime,
|
||||
)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
|
||||
match layout.size() {
|
||||
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
|
||||
// SAFETY: `layout` is non-zero in size,
|
||||
size => unsafe {
|
||||
let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
|
||||
let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
|
||||
if zeroed {
|
||||
// SAFETY: the pointer returned by `const_allocate` is valid to write to.
|
||||
ptr.write_bytes(0, size);
|
||||
}
|
||||
Ok(NonNull::slice_from_raw_parts(ptr, size))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
|
||||
if layout.size() != 0 {
|
||||
// SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
|
||||
unsafe {
|
||||
core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const fn grow_shrink_impl_const(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
zeroed: bool,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
let new_ptr = self.alloc_impl(new_layout, zeroed)?;
|
||||
// SAFETY: both pointers are valid and this operations is in bounds.
|
||||
unsafe {
|
||||
ptr::copy_nonoverlapping(
|
||||
ptr.as_ptr(),
|
||||
new_ptr.as_mut_ptr(),
|
||||
cmp::min(old_layout.size(), new_layout.size()),
|
||||
);
|
||||
}
|
||||
unsafe {
|
||||
self.deallocate_impl(ptr, old_layout);
|
||||
}
|
||||
Ok(new_ptr)
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
unsafe impl const Allocator for Global {
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
|
||||
self.alloc_impl(layout, false)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
|
||||
self.alloc_impl(layout, true)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
|
||||
// SAFETY: all conditions must be upheld by the caller
|
||||
unsafe { self.deallocate_impl(ptr, layout) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn grow(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
// SAFETY: all conditions must be upheld by the caller
|
||||
unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn grow_zeroed(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
// SAFETY: all conditions must be upheld by the caller
|
||||
unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
|
||||
unsafe fn shrink(
|
||||
&self,
|
||||
ptr: NonNull<u8>,
|
||||
old_layout: Layout,
|
||||
new_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, AllocError> {
|
||||
// SAFETY: all conditions must be upheld by the caller
|
||||
unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
|
||||
}
|
||||
}
|
||||
|
||||
/// The allocator for `Box`.
|
||||
|
|
|
|||
|
|
@ -84,13 +84,14 @@ impl TryReserveError {
|
|||
reason = "Uncertain how much info should be exposed",
|
||||
issue = "48043"
|
||||
)]
|
||||
pub fn kind(&self) -> TryReserveErrorKind {
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
pub const fn kind(&self) -> TryReserveErrorKind {
|
||||
self.kind.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Details of the allocation that caused a `TryReserveError`
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
#[unstable(
|
||||
feature = "try_reserve_kind",
|
||||
reason = "Uncertain how much info should be exposed",
|
||||
|
|
@ -120,6 +121,24 @@ pub enum TryReserveErrorKind {
|
|||
},
|
||||
}
|
||||
|
||||
#[unstable(
|
||||
feature = "try_reserve_kind",
|
||||
reason = "Uncertain how much info should be exposed",
|
||||
issue = "48043"
|
||||
)]
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
#[cfg(not(test))]
|
||||
impl const Clone for TryReserveErrorKind {
|
||||
fn clone(&self) -> Self {
|
||||
match self {
|
||||
TryReserveErrorKind::CapacityOverflow => TryReserveErrorKind::CapacityOverflow,
|
||||
TryReserveErrorKind::AllocError { layout, non_exhaustive: () } => {
|
||||
TryReserveErrorKind::AllocError { layout: *layout, non_exhaustive: () }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub use realalloc::collections::TryReserveErrorKind;
|
||||
|
||||
|
|
|
|||
|
|
@ -101,10 +101,15 @@
|
|||
#![feature(char_internals)]
|
||||
#![feature(clone_to_uninit)]
|
||||
#![feature(coerce_unsized)]
|
||||
#![feature(const_clone)]
|
||||
#![feature(const_cmp)]
|
||||
#![feature(const_convert)]
|
||||
#![feature(const_default)]
|
||||
#![feature(const_destruct)]
|
||||
#![feature(const_eval_select)]
|
||||
#![feature(const_heap)]
|
||||
#![feature(const_option_ops)]
|
||||
#![feature(const_try)]
|
||||
#![feature(copied_into_inner)]
|
||||
#![feature(core_intrinsics)]
|
||||
#![feature(deprecated_suggestion)]
|
||||
|
|
@ -119,6 +124,7 @@
|
|||
#![feature(fmt_internals)]
|
||||
#![feature(fn_traits)]
|
||||
#![feature(formatting_options)]
|
||||
#![feature(freeze)]
|
||||
#![feature(generic_atomic)]
|
||||
#![feature(hasher_prefixfree_extras)]
|
||||
#![feature(inplace_iteration)]
|
||||
|
|
@ -172,6 +178,7 @@
|
|||
#![feature(const_trait_impl)]
|
||||
#![feature(coroutine_trait)]
|
||||
#![feature(decl_macro)]
|
||||
#![feature(derive_const)]
|
||||
#![feature(dropck_eyepatch)]
|
||||
#![feature(fundamental)]
|
||||
#![feature(hashmap_internals)]
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
// Note: This module is also included in the alloctests crate using #[path] to
|
||||
// run the tests. See the comment there for an explanation why this is the case.
|
||||
|
||||
use core::marker::PhantomData;
|
||||
use core::marker::{Destruct, PhantomData};
|
||||
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
|
||||
use core::ptr::{self, Alignment, NonNull, Unique};
|
||||
use core::{cmp, hint};
|
||||
|
|
@ -24,7 +24,7 @@ mod tests;
|
|||
// only one location which panics rather than a bunch throughout the module.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[cfg_attr(not(panic = "immediate-abort"), inline(never))]
|
||||
fn capacity_overflow() -> ! {
|
||||
const fn capacity_overflow() -> ! {
|
||||
panic!("capacity overflow");
|
||||
}
|
||||
|
||||
|
|
@ -165,6 +165,30 @@ const fn min_non_zero_cap(size: usize) -> usize {
|
|||
}
|
||||
}
|
||||
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
|
||||
const impl<T, A: [const] Allocator + [const] Destruct> RawVec<T, A> {
|
||||
/// Like `with_capacity`, but parameterized over the choice of
|
||||
/// allocator for the returned `RawVec`.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
|
||||
Self {
|
||||
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// A specialized version of `self.reserve(len, 1)` which requires the
|
||||
/// caller to ensure `len == self.capacity()`.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline(never)]
|
||||
pub(crate) fn grow_one(&mut self) {
|
||||
// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
|
||||
unsafe { self.inner.grow_one(T::LAYOUT) }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, A: Allocator> RawVec<T, A> {
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::<T>());
|
||||
|
|
@ -178,17 +202,6 @@ impl<T, A: Allocator> RawVec<T, A> {
|
|||
Self { inner: RawVecInner::new_in(alloc, Alignment::of::<T>()), _marker: PhantomData }
|
||||
}
|
||||
|
||||
/// Like `with_capacity`, but parameterized over the choice of
|
||||
/// allocator for the returned `RawVec`.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
|
||||
Self {
|
||||
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Like `try_with_capacity`, but parameterized over the choice of
|
||||
/// allocator for the returned `RawVec`.
|
||||
#[inline]
|
||||
|
|
@ -327,15 +340,6 @@ impl<T, A: Allocator> RawVec<T, A> {
|
|||
unsafe { self.inner.reserve(len, additional, T::LAYOUT) }
|
||||
}
|
||||
|
||||
/// A specialized version of `self.reserve(len, 1)` which requires the
|
||||
/// caller to ensure `len == self.capacity()`.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline(never)]
|
||||
pub(crate) fn grow_one(&mut self) {
|
||||
// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
|
||||
unsafe { self.inner.grow_one(T::LAYOUT) }
|
||||
}
|
||||
|
||||
/// The same as `reserve`, but returns on errors instead of panicking or aborting.
|
||||
pub(crate) fn try_reserve(
|
||||
&mut self,
|
||||
|
|
@ -405,14 +409,9 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<A: Allocator> RawVecInner<A> {
|
||||
#[inline]
|
||||
const fn new_in(alloc: A, align: Alignment) -> Self {
|
||||
let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero()));
|
||||
// `cap: 0` means "unallocated". zero-sized types are ignored.
|
||||
Self { ptr, cap: ZERO_CAP, alloc }
|
||||
}
|
||||
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
|
||||
const impl<A: [const] Allocator + [const] Destruct> RawVecInner<A> {
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
|
||||
|
|
@ -428,24 +427,6 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn try_with_capacity_in(
|
||||
capacity: usize,
|
||||
alloc: A,
|
||||
elem_layout: Layout,
|
||||
) -> Result<Self, TryReserveError> {
|
||||
Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout)
|
||||
}
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
|
||||
match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
|
||||
Ok(res) => res,
|
||||
Err(err) => handle_error(err),
|
||||
}
|
||||
}
|
||||
|
||||
fn try_allocate_in(
|
||||
capacity: usize,
|
||||
init: AllocInit,
|
||||
|
|
@ -484,6 +465,118 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
})
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
unsafe fn grow_one(&mut self, elem_layout: Layout) {
|
||||
// SAFETY: Precondition passed to caller
|
||||
if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
|
||||
handle_error(err);
|
||||
}
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
/// - The sum of `len` and `additional` must be greater than the current capacity
|
||||
unsafe fn grow_amortized(
|
||||
&mut self,
|
||||
len: usize,
|
||||
additional: usize,
|
||||
elem_layout: Layout,
|
||||
) -> Result<(), TryReserveError> {
|
||||
// This is ensured by the calling contexts.
|
||||
debug_assert!(additional > 0);
|
||||
|
||||
if elem_layout.size() == 0 {
|
||||
// Since we return a capacity of `usize::MAX` when `elem_size` is
|
||||
// 0, getting to here necessarily means the `RawVec` is overfull.
|
||||
return Err(CapacityOverflow.into());
|
||||
}
|
||||
|
||||
// Nothing we can really do about these checks, sadly.
|
||||
let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
|
||||
|
||||
// This guarantees exponential growth. The doubling cannot overflow
|
||||
// because `cap <= isize::MAX` and the type of `cap` is `usize`.
|
||||
let cap = cmp::max(self.cap.as_inner() * 2, required_cap);
|
||||
let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);
|
||||
|
||||
// SAFETY:
|
||||
// - cap >= len + additional
|
||||
// - other preconditions passed to caller
|
||||
let ptr = unsafe { self.finish_grow(cap, elem_layout)? };
|
||||
|
||||
// SAFETY: `finish_grow` would have failed if `cap > isize::MAX`
|
||||
unsafe { self.set_ptr_and_cap(ptr, cap) };
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
/// - `cap` must be greater than the current capacity
|
||||
// not marked inline(never) since we want optimizers to be able to observe the specifics of this
|
||||
// function, see tests/codegen-llvm/vec-reserve-extend.rs.
|
||||
#[cold]
|
||||
unsafe fn finish_grow(
|
||||
&self,
|
||||
cap: usize,
|
||||
elem_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, TryReserveError> {
|
||||
let new_layout = layout_array(cap, elem_layout)?;
|
||||
|
||||
let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
|
||||
// FIXME(const-hack): switch to `debug_assert_eq`
|
||||
debug_assert!(old_layout.align() == new_layout.align());
|
||||
unsafe {
|
||||
// The allocator checks for alignment equality
|
||||
hint::assert_unchecked(old_layout.align() == new_layout.align());
|
||||
self.alloc.grow(ptr, old_layout, new_layout)
|
||||
}
|
||||
} else {
|
||||
self.alloc.allocate(new_layout)
|
||||
};
|
||||
|
||||
// FIXME(const-hack): switch back to `map_err`
|
||||
match memory {
|
||||
Ok(memory) => Ok(memory),
|
||||
Err(_) => Err(AllocError { layout: new_layout, non_exhaustive: () }.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: Allocator> RawVecInner<A> {
|
||||
#[inline]
|
||||
const fn new_in(alloc: A, align: Alignment) -> Self {
|
||||
let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero()));
|
||||
// `cap: 0` means "unallocated". zero-sized types are ignored.
|
||||
Self { ptr, cap: ZERO_CAP, alloc }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn try_with_capacity_in(
|
||||
capacity: usize,
|
||||
alloc: A,
|
||||
elem_layout: Layout,
|
||||
) -> Result<Self, TryReserveError> {
|
||||
Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout)
|
||||
}
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
|
||||
match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
|
||||
Ok(res) => res,
|
||||
Err(err) => handle_error(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self {
|
||||
Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc }
|
||||
|
|
@ -519,7 +612,8 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
#[inline]
|
||||
unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
|
||||
if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
|
||||
None
|
||||
} else {
|
||||
|
|
@ -566,19 +660,6 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
}
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
unsafe fn grow_one(&mut self, elem_layout: Layout) {
|
||||
// SAFETY: Precondition passed to caller
|
||||
if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
|
||||
handle_error(err);
|
||||
}
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
|
|
@ -651,12 +732,13 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
|
||||
const fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
|
||||
additional > self.capacity(elem_layout.size()).wrapping_sub(len)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
|
||||
// Allocators currently return a `NonNull<[u8]>` whose length matches
|
||||
// the size requested. If that ever changes, the capacity here should
|
||||
// change to `ptr.len() / size_of::<T>()`.
|
||||
|
|
@ -664,44 +746,6 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
self.cap = unsafe { Cap::new_unchecked(cap) };
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
/// - The sum of `len` and `additional` must be greater than the current capacity
|
||||
unsafe fn grow_amortized(
|
||||
&mut self,
|
||||
len: usize,
|
||||
additional: usize,
|
||||
elem_layout: Layout,
|
||||
) -> Result<(), TryReserveError> {
|
||||
// This is ensured by the calling contexts.
|
||||
debug_assert!(additional > 0);
|
||||
|
||||
if elem_layout.size() == 0 {
|
||||
// Since we return a capacity of `usize::MAX` when `elem_size` is
|
||||
// 0, getting to here necessarily means the `RawVec` is overfull.
|
||||
return Err(CapacityOverflow.into());
|
||||
}
|
||||
|
||||
// Nothing we can really do about these checks, sadly.
|
||||
let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
|
||||
|
||||
// This guarantees exponential growth. The doubling cannot overflow
|
||||
// because `cap <= isize::MAX` and the type of `cap` is `usize`.
|
||||
let cap = cmp::max(self.cap.as_inner() * 2, required_cap);
|
||||
let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);
|
||||
|
||||
// SAFETY:
|
||||
// - cap >= len + additional
|
||||
// - other preconditions passed to caller
|
||||
let ptr = unsafe { self.finish_grow(cap, elem_layout)? };
|
||||
|
||||
// SAFETY: `finish_grow` would have failed if `cap > isize::MAX`
|
||||
unsafe { self.set_ptr_and_cap(ptr, cap) };
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
|
|
@ -729,35 +773,6 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
/// - `elem_layout`'s size must be a multiple of its alignment
|
||||
/// - `cap` must be greater than the current capacity
|
||||
// not marked inline(never) since we want optimizers to be able to observe the specifics of this
|
||||
// function, see tests/codegen-llvm/vec-reserve-extend.rs.
|
||||
#[cold]
|
||||
unsafe fn finish_grow(
|
||||
&self,
|
||||
cap: usize,
|
||||
elem_layout: Layout,
|
||||
) -> Result<NonNull<[u8]>, TryReserveError> {
|
||||
let new_layout = layout_array(cap, elem_layout)?;
|
||||
|
||||
let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
|
||||
debug_assert_eq!(old_layout.align(), new_layout.align());
|
||||
unsafe {
|
||||
// The allocator checks for alignment equality
|
||||
hint::assert_unchecked(old_layout.align() == new_layout.align());
|
||||
self.alloc.grow(ptr, old_layout, new_layout)
|
||||
}
|
||||
} else {
|
||||
self.alloc.allocate(new_layout)
|
||||
};
|
||||
|
||||
memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
|
||||
/// initially construct `self`
|
||||
|
|
@ -839,7 +854,8 @@ impl<A: Allocator> RawVecInner<A> {
|
|||
#[cfg(not(no_global_oom_handling))]
|
||||
#[cold]
|
||||
#[optimize(size)]
|
||||
fn handle_error(e: TryReserveError) -> ! {
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const fn handle_error(e: TryReserveError) -> ! {
|
||||
match e.kind() {
|
||||
CapacityOverflow => capacity_overflow(),
|
||||
AllocError { layout, .. } => handle_alloc_error(layout),
|
||||
|
|
@ -847,6 +863,11 @@ fn handle_error(e: TryReserveError) -> ! {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
|
||||
elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into())
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
const fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
|
||||
// FIXME(const-hack) return to using `map` and `map_err` once `const_closures` is implemented
|
||||
match elem_layout.repeat(cap) {
|
||||
Ok((layout, _pad)) => Ok(layout),
|
||||
Err(_) => Err(CapacityOverflow.into()),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -81,7 +81,9 @@ use core::cmp::Ordering;
|
|||
use core::hash::{Hash, Hasher};
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
use core::iter;
|
||||
use core::marker::PhantomData;
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
use core::marker::Destruct;
|
||||
use core::marker::{Freeze, PhantomData};
|
||||
use core::mem::{self, Assume, ManuallyDrop, MaybeUninit, SizedTypeProperties, TransmuteFrom};
|
||||
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
|
||||
use core::ptr::{self, NonNull};
|
||||
|
|
@ -519,7 +521,8 @@ impl<T> Vec<T> {
|
|||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[must_use]
|
||||
#[rustc_diagnostic_item = "vec_with_capacity"]
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
pub const fn with_capacity(capacity: usize) -> Self {
|
||||
Self::with_capacity_in(capacity, Global)
|
||||
}
|
||||
|
||||
|
|
@ -881,29 +884,28 @@ impl<T> Vec<T> {
|
|||
// SAFETY: A `Vec` always has a non-null pointer.
|
||||
(unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
|
||||
}
|
||||
|
||||
/// Interns the `Vec<T>`, making the underlying memory read-only. This method should be
|
||||
/// called during compile time. (This is a no-op if called during runtime)
|
||||
///
|
||||
/// This method must be called if the memory used by `Vec` needs to appear in the final
|
||||
/// values of constants.
|
||||
#[unstable(feature = "const_heap", issue = "79597")]
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
pub const fn const_make_global(mut self) -> &'static [T]
|
||||
where
|
||||
T: Freeze,
|
||||
{
|
||||
unsafe { core::intrinsics::const_make_global(self.as_mut_ptr().cast()) };
|
||||
let me = ManuallyDrop::new(self);
|
||||
unsafe { slice::from_raw_parts(me.as_ptr(), me.len) }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, A: Allocator> Vec<T, A> {
|
||||
/// Constructs a new, empty `Vec<T, A>`.
|
||||
///
|
||||
/// The vector will not allocate until elements are pushed onto it.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(allocator_api)]
|
||||
///
|
||||
/// use std::alloc::System;
|
||||
///
|
||||
/// # #[allow(unused_mut)]
|
||||
/// let mut vec: Vec<i32, _> = Vec::new_in(System);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
pub const fn new_in(alloc: A) -> Self {
|
||||
Vec { buf: RawVec::new_in(alloc), len: 0 }
|
||||
}
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
|
||||
const impl<T, A: [const] Allocator + [const] Destruct> Vec<T, A> {
|
||||
/// Constructs a new, empty `Vec<T, A>` with at least the specified capacity
|
||||
/// with the provided allocator.
|
||||
///
|
||||
|
|
@ -959,13 +961,109 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
/// let vec_units = Vec::<(), System>::with_capacity_in(10, System);
|
||||
/// assert_eq!(vec_units.capacity(), usize::MAX);
|
||||
/// ```
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
|
||||
Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
|
||||
}
|
||||
|
||||
/// Appends an element to the back of a collection.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut vec = vec![1, 2];
|
||||
/// vec.push(3);
|
||||
/// assert_eq!(vec, [1, 2, 3]);
|
||||
/// ```
|
||||
///
|
||||
/// # Time complexity
|
||||
///
|
||||
/// Takes amortized *O*(1) time. If the vector's length would exceed its
|
||||
/// capacity after the push, *O*(*capacity*) time is taken to copy the
|
||||
/// vector's elements to a larger allocation. This expensive operation is
|
||||
/// offset by the *capacity* *O*(1) insertions it allows.
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_confusables("push_back", "put", "append")]
|
||||
pub fn push(&mut self, value: T) {
|
||||
let _ = self.push_mut(value);
|
||||
}
|
||||
|
||||
/// Appends an element to the back of a collection, returning a reference to it.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(push_mut)]
|
||||
///
|
||||
///
|
||||
/// let mut vec = vec![1, 2];
|
||||
/// let last = vec.push_mut(3);
|
||||
/// assert_eq!(*last, 3);
|
||||
/// assert_eq!(vec, [1, 2, 3]);
|
||||
///
|
||||
/// let last = vec.push_mut(3);
|
||||
/// *last += 1;
|
||||
/// assert_eq!(vec, [1, 2, 3, 4]);
|
||||
/// ```
|
||||
///
|
||||
/// # Time complexity
|
||||
///
|
||||
/// Takes amortized *O*(1) time. If the vector's length would exceed its
|
||||
/// capacity after the push, *O*(*capacity*) time is taken to copy the
|
||||
/// vector's elements to a larger allocation. This expensive operation is
|
||||
/// offset by the *capacity* *O*(1) insertions it allows.
|
||||
#[inline]
|
||||
#[unstable(feature = "push_mut", issue = "135974")]
|
||||
#[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
|
||||
pub fn push_mut(&mut self, value: T) -> &mut T {
|
||||
// Inform codegen that the length does not change across grow_one().
|
||||
let len = self.len;
|
||||
// This will panic or abort if we would allocate > isize::MAX bytes
|
||||
// or if the length increment would overflow for zero-sized types.
|
||||
if len == self.buf.capacity() {
|
||||
self.buf.grow_one();
|
||||
}
|
||||
unsafe {
|
||||
let end = self.as_mut_ptr().add(len);
|
||||
ptr::write(end, value);
|
||||
self.len = len + 1;
|
||||
// SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference.
|
||||
&mut *end
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, A: Allocator> Vec<T, A> {
|
||||
/// Constructs a new, empty `Vec<T, A>`.
|
||||
///
|
||||
/// The vector will not allocate until elements are pushed onto it.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(allocator_api)]
|
||||
///
|
||||
/// use std::alloc::System;
|
||||
///
|
||||
/// # #[allow(unused_mut)]
|
||||
/// let mut vec: Vec<i32, _> = Vec::new_in(System);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
pub const fn new_in(alloc: A) -> Self {
|
||||
Vec { buf: RawVec::new_in(alloc), len: 0 }
|
||||
}
|
||||
|
||||
/// Constructs a new, empty `Vec<T, A>` with at least the specified capacity
|
||||
/// with the provided allocator.
|
||||
///
|
||||
|
|
@ -2546,34 +2644,6 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Appends an element to the back of a collection.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut vec = vec![1, 2];
|
||||
/// vec.push(3);
|
||||
/// assert_eq!(vec, [1, 2, 3]);
|
||||
/// ```
|
||||
///
|
||||
/// # Time complexity
|
||||
///
|
||||
/// Takes amortized *O*(1) time. If the vector's length would exceed its
|
||||
/// capacity after the push, *O*(*capacity*) time is taken to copy the
|
||||
/// vector's elements to a larger allocation. This expensive operation is
|
||||
/// offset by the *capacity* *O*(1) insertions it allows.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_confusables("push_back", "put", "append")]
|
||||
pub fn push(&mut self, value: T) {
|
||||
let _ = self.push_mut(value);
|
||||
}
|
||||
|
||||
/// Appends an element and returns a reference to it if there is sufficient spare capacity,
|
||||
/// otherwise an error is returned with the element.
|
||||
///
|
||||
|
|
@ -2627,55 +2697,6 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Appends an element to the back of a collection, returning a reference to it.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(push_mut)]
|
||||
///
|
||||
///
|
||||
/// let mut vec = vec![1, 2];
|
||||
/// let last = vec.push_mut(3);
|
||||
/// assert_eq!(*last, 3);
|
||||
/// assert_eq!(vec, [1, 2, 3]);
|
||||
///
|
||||
/// let last = vec.push_mut(3);
|
||||
/// *last += 1;
|
||||
/// assert_eq!(vec, [1, 2, 3, 4]);
|
||||
/// ```
|
||||
///
|
||||
/// # Time complexity
|
||||
///
|
||||
/// Takes amortized *O*(1) time. If the vector's length would exceed its
|
||||
/// capacity after the push, *O*(*capacity*) time is taken to copy the
|
||||
/// vector's elements to a larger allocation. This expensive operation is
|
||||
/// offset by the *capacity* *O*(1) insertions it allows.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[inline]
|
||||
#[unstable(feature = "push_mut", issue = "135974")]
|
||||
#[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
|
||||
pub fn push_mut(&mut self, value: T) -> &mut T {
|
||||
// Inform codegen that the length does not change across grow_one().
|
||||
let len = self.len;
|
||||
// This will panic or abort if we would allocate > isize::MAX bytes
|
||||
// or if the length increment would overflow for zero-sized types.
|
||||
if len == self.buf.capacity() {
|
||||
self.buf.grow_one();
|
||||
}
|
||||
unsafe {
|
||||
let end = self.as_mut_ptr().add(len);
|
||||
ptr::write(end, value);
|
||||
self.len = len + 1;
|
||||
// SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference.
|
||||
&mut *end
|
||||
}
|
||||
}
|
||||
|
||||
/// Removes the last element from a vector and returns it, or [`None`] if it
|
||||
/// is empty.
|
||||
///
|
||||
|
|
|
|||
|
|
@ -20,6 +20,13 @@
|
|||
#![feature(assert_matches)]
|
||||
#![feature(box_vec_non_null)]
|
||||
#![feature(char_internals)]
|
||||
#![feature(const_alloc_error)]
|
||||
#![feature(const_cmp)]
|
||||
#![feature(const_convert)]
|
||||
#![feature(const_destruct)]
|
||||
#![feature(const_heap)]
|
||||
#![feature(const_option_ops)]
|
||||
#![feature(const_try)]
|
||||
#![feature(copied_into_inner)]
|
||||
#![feature(core_intrinsics)]
|
||||
#![feature(exact_size_is_empty)]
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
#![feature(allocator_api)]
|
||||
#![feature(alloc_layout_extra)]
|
||||
#![feature(const_heap)]
|
||||
#![feature(deque_extend_front)]
|
||||
#![feature(iter_array_chunks)]
|
||||
#![feature(assert_matches)]
|
||||
|
|
|
|||
|
|
@ -2749,3 +2749,19 @@ fn zst_collections_iter_nth_back_regression() {
|
|||
list.push_back(Thing);
|
||||
let _ = list.into_iter().nth_back(1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn const_heap() {
|
||||
const X: &'static [u32] = {
|
||||
let mut v = Vec::with_capacity(6);
|
||||
let mut x = 1;
|
||||
while x < 42 {
|
||||
v.push(x);
|
||||
x *= 2;
|
||||
}
|
||||
assert!(v.len() == 6);
|
||||
v.const_make_global()
|
||||
};
|
||||
|
||||
assert_eq!([1, 2, 4, 8, 16, 32], X);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -102,7 +102,8 @@ impl fmt::Display for AllocError {
|
|||
///
|
||||
/// [*currently allocated*]: #currently-allocated-memory
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
pub unsafe trait Allocator {
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
pub const unsafe trait Allocator {
|
||||
/// Attempts to allocate a block of memory.
|
||||
///
|
||||
/// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`.
|
||||
|
|
@ -368,9 +369,10 @@ pub unsafe trait Allocator {
|
|||
}
|
||||
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
unsafe impl<A> Allocator for &A
|
||||
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
|
||||
unsafe impl<A> const Allocator for &A
|
||||
where
|
||||
A: Allocator + ?Sized,
|
||||
A: [const] Allocator + ?Sized,
|
||||
{
|
||||
#[inline]
|
||||
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@ pub fn foo2() -> Box<dyn TestTrait2> {
|
|||
//~ MONO_ITEM fn <TestStruct2 as TestTrait2>::test_func2
|
||||
//~ MONO_ITEM fn alloc::alloc::exchange_malloc
|
||||
//~ MONO_ITEM fn foo2
|
||||
//~ MONO_ITEM fn std::alloc::Global::alloc_impl
|
||||
//~ MONO_ITEM fn std::alloc::Global::alloc_impl_runtime
|
||||
//~ MONO_ITEM fn std::boxed::Box::<TestStruct2>::new
|
||||
//~ MONO_ITEM fn std::alloc::Layout::from_size_align_unchecked::precondition_check
|
||||
//~ MONO_ITEM fn std::ptr::Alignment::new_unchecked::precondition_check
|
||||
|
|
|
|||
|
|
@ -26,12 +26,12 @@
|
|||
scope 4 {
|
||||
debug _x => _8;
|
||||
}
|
||||
scope 18 (inlined foo) {
|
||||
scope 19 (inlined foo) {
|
||||
let mut _27: *const [()];
|
||||
}
|
||||
}
|
||||
scope 16 (inlined slice_from_raw_parts::<()>) {
|
||||
scope 17 (inlined std::ptr::from_raw_parts::<[()], ()>) {
|
||||
scope 17 (inlined slice_from_raw_parts::<()>) {
|
||||
scope 18 (inlined std::ptr::from_raw_parts::<[()], ()>) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -49,19 +49,21 @@
|
|||
scope 7 {
|
||||
let _21: std::ptr::NonNull<[u8]>;
|
||||
scope 8 {
|
||||
scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) {
|
||||
scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) {
|
||||
scope 13 (inlined NonNull::<[u8]>::cast::<u8>) {
|
||||
scope 12 (inlined NonNull::<[u8]>::as_mut_ptr) {
|
||||
scope 13 (inlined NonNull::<[u8]>::as_non_null_ptr) {
|
||||
scope 14 (inlined NonNull::<[u8]>::cast::<u8>) {
|
||||
let mut _25: *mut [u8];
|
||||
scope 14 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
scope 15 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
}
|
||||
}
|
||||
}
|
||||
scope 15 (inlined NonNull::<u8>::as_ptr) {
|
||||
scope 16 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
}
|
||||
}
|
||||
scope 10 (inlined <std::alloc::Global as Allocator>::allocate) {
|
||||
scope 11 (inlined std::alloc::Global::alloc_impl) {
|
||||
}
|
||||
}
|
||||
}
|
||||
scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) {
|
||||
|
|
@ -192,8 +194,8 @@
|
|||
+ _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }};
|
||||
StorageDead(_24);
|
||||
StorageLive(_19);
|
||||
- _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable];
|
||||
+ _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];
|
||||
- _19 = std::alloc::Global::alloc_impl_runtime(copy _18, const false) -> [return: bb7, unwind unreachable];
|
||||
+ _19 = std::alloc::Global::alloc_impl_runtime(const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];
|
||||
}
|
||||
|
||||
bb7: {
|
||||
|
|
|
|||
|
|
@ -25,17 +25,21 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
|
|||
}
|
||||
}
|
||||
scope 18 (inlined <std::alloc::Global as Allocator>::deallocate) {
|
||||
let mut _9: *mut u8;
|
||||
scope 19 (inlined Layout::size) {
|
||||
}
|
||||
scope 20 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
scope 21 (inlined std::alloc::dealloc) {
|
||||
let mut _10: usize;
|
||||
scope 22 (inlined Layout::size) {
|
||||
}
|
||||
scope 23 (inlined Layout::align) {
|
||||
scope 24 (inlined std::ptr::Alignment::as_usize) {
|
||||
scope 19 (inlined std::alloc::Global::deallocate_impl) {
|
||||
scope 20 (inlined std::alloc::Global::deallocate_impl_runtime) {
|
||||
let mut _9: *mut u8;
|
||||
scope 21 (inlined Layout::size) {
|
||||
}
|
||||
scope 22 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
scope 23 (inlined std::alloc::dealloc) {
|
||||
let mut _10: usize;
|
||||
scope 24 (inlined Layout::size) {
|
||||
}
|
||||
scope 25 (inlined Layout::align) {
|
||||
scope 26 (inlined std::ptr::Alignment::as_usize) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -25,17 +25,21 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
|
|||
}
|
||||
}
|
||||
scope 18 (inlined <std::alloc::Global as Allocator>::deallocate) {
|
||||
let mut _9: *mut u8;
|
||||
scope 19 (inlined Layout::size) {
|
||||
}
|
||||
scope 20 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
scope 21 (inlined std::alloc::dealloc) {
|
||||
let mut _10: usize;
|
||||
scope 22 (inlined Layout::size) {
|
||||
}
|
||||
scope 23 (inlined Layout::align) {
|
||||
scope 24 (inlined std::ptr::Alignment::as_usize) {
|
||||
scope 19 (inlined std::alloc::Global::deallocate_impl) {
|
||||
scope 20 (inlined std::alloc::Global::deallocate_impl_runtime) {
|
||||
let mut _9: *mut u8;
|
||||
scope 21 (inlined Layout::size) {
|
||||
}
|
||||
scope 22 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
scope 23 (inlined std::alloc::dealloc) {
|
||||
let mut _10: usize;
|
||||
scope 24 (inlined Layout::size) {
|
||||
}
|
||||
scope 25 (inlined Layout::align) {
|
||||
scope 26 (inlined std::ptr::Alignment::as_usize) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -25,17 +25,21 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
|
|||
}
|
||||
}
|
||||
scope 18 (inlined <std::alloc::Global as Allocator>::deallocate) {
|
||||
let mut _9: *mut u8;
|
||||
scope 19 (inlined Layout::size) {
|
||||
}
|
||||
scope 20 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
scope 21 (inlined std::alloc::dealloc) {
|
||||
let mut _10: usize;
|
||||
scope 22 (inlined Layout::size) {
|
||||
}
|
||||
scope 23 (inlined Layout::align) {
|
||||
scope 24 (inlined std::ptr::Alignment::as_usize) {
|
||||
scope 19 (inlined std::alloc::Global::deallocate_impl) {
|
||||
scope 20 (inlined std::alloc::Global::deallocate_impl_runtime) {
|
||||
let mut _9: *mut u8;
|
||||
scope 21 (inlined Layout::size) {
|
||||
}
|
||||
scope 22 (inlined NonNull::<u8>::as_ptr) {
|
||||
}
|
||||
scope 23 (inlined std::alloc::dealloc) {
|
||||
let mut _10: usize;
|
||||
scope 24 (inlined Layout::size) {
|
||||
}
|
||||
scope 25 (inlined Layout::align) {
|
||||
scope 26 (inlined std::ptr::Alignment::as_usize) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,33 +9,33 @@
|
|||
let mut _4: *mut [u8];
|
||||
let mut _5: std::ptr::NonNull<[u8]>;
|
||||
let mut _6: std::result::Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError>;
|
||||
let mut _7: &std::alloc::Global;
|
||||
let mut _8: std::alloc::Layout;
|
||||
let mut _7: std::alloc::Layout;
|
||||
scope 1 {
|
||||
debug layout => _1;
|
||||
let mut _9: &std::alloc::Global;
|
||||
scope 2 {
|
||||
debug ptr => _3;
|
||||
}
|
||||
scope 5 (inlined <std::alloc::Global as Allocator>::allocate) {
|
||||
}
|
||||
scope 6 (inlined #[track_caller] Result::<NonNull<[u8]>, std::alloc::AllocError>::unwrap) {
|
||||
let mut _12: isize;
|
||||
let _13: std::alloc::AllocError;
|
||||
let mut _14: !;
|
||||
let mut _15: &dyn std::fmt::Debug;
|
||||
let _16: &std::alloc::AllocError;
|
||||
scope 7 {
|
||||
scope 6 (inlined std::alloc::Global::alloc_impl) {
|
||||
}
|
||||
}
|
||||
scope 7 (inlined #[track_caller] Result::<NonNull<[u8]>, std::alloc::AllocError>::unwrap) {
|
||||
let mut _10: isize;
|
||||
let _11: std::alloc::AllocError;
|
||||
let mut _12: !;
|
||||
let mut _13: &dyn std::fmt::Debug;
|
||||
let _14: &std::alloc::AllocError;
|
||||
scope 8 {
|
||||
}
|
||||
scope 9 {
|
||||
}
|
||||
}
|
||||
scope 9 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
scope 10 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
}
|
||||
}
|
||||
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
|
||||
let mut _10: isize;
|
||||
let mut _11: !;
|
||||
let mut _8: isize;
|
||||
let mut _9: !;
|
||||
scope 4 {
|
||||
}
|
||||
}
|
||||
|
|
@ -46,10 +46,10 @@
|
|||
StorageLive(_2);
|
||||
- _2 = Option::<Layout>::None;
|
||||
+ _2 = const Option::<Layout>::None;
|
||||
StorageLive(_10);
|
||||
- _10 = discriminant(_2);
|
||||
- switchInt(move _10) -> [0: bb2, 1: bb3, otherwise: bb1];
|
||||
+ _10 = const 0_isize;
|
||||
StorageLive(_8);
|
||||
- _8 = discriminant(_2);
|
||||
- switchInt(move _8) -> [0: bb2, 1: bb3, otherwise: bb1];
|
||||
+ _8 = const 0_isize;
|
||||
+ switchInt(const 0_isize) -> [0: bb2, 1: bb3, otherwise: bb1];
|
||||
}
|
||||
|
||||
|
|
@ -58,48 +58,44 @@
|
|||
}
|
||||
|
||||
bb2: {
|
||||
_11 = option::unwrap_failed() -> unwind unreachable;
|
||||
_9 = option::unwrap_failed() -> unwind unreachable;
|
||||
}
|
||||
|
||||
bb3: {
|
||||
- _1 = move ((_2 as Some).0: std::alloc::Layout);
|
||||
+ _1 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
StorageDead(_10);
|
||||
StorageDead(_8);
|
||||
StorageDead(_2);
|
||||
StorageLive(_3);
|
||||
StorageLive(_4);
|
||||
StorageLive(_5);
|
||||
StorageLive(_6);
|
||||
StorageLive(_7);
|
||||
_9 = const main::promoted[0];
|
||||
_7 = copy _9;
|
||||
StorageLive(_8);
|
||||
- _8 = copy _1;
|
||||
- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb4, unwind unreachable];
|
||||
+ _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
+ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb4, unwind unreachable];
|
||||
- _7 = copy _1;
|
||||
- _6 = std::alloc::Global::alloc_impl_runtime(move _7, const false) -> [return: bb4, unwind unreachable];
|
||||
+ _7 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
+ _6 = std::alloc::Global::alloc_impl_runtime(const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb4, unwind unreachable];
|
||||
}
|
||||
|
||||
bb4: {
|
||||
StorageDead(_8);
|
||||
StorageDead(_7);
|
||||
StorageLive(_12);
|
||||
StorageLive(_16);
|
||||
_12 = discriminant(_6);
|
||||
switchInt(move _12) -> [0: bb6, 1: bb5, otherwise: bb1];
|
||||
StorageLive(_10);
|
||||
StorageLive(_14);
|
||||
_10 = discriminant(_6);
|
||||
switchInt(move _10) -> [0: bb6, 1: bb5, otherwise: bb1];
|
||||
}
|
||||
|
||||
bb5: {
|
||||
StorageLive(_15);
|
||||
_16 = &_13;
|
||||
_15 = copy _16 as &dyn std::fmt::Debug (PointerCoercion(Unsize, Implicit));
|
||||
_14 = result::unwrap_failed(const "called `Result::unwrap()` on an `Err` value", move _15) -> unwind unreachable;
|
||||
StorageLive(_13);
|
||||
_14 = &_11;
|
||||
_13 = copy _14 as &dyn std::fmt::Debug (PointerCoercion(Unsize, Implicit));
|
||||
_12 = result::unwrap_failed(const "called `Result::unwrap()` on an `Err` value", move _13) -> unwind unreachable;
|
||||
}
|
||||
|
||||
bb6: {
|
||||
_5 = move ((_6 as Ok).0: std::ptr::NonNull<[u8]>);
|
||||
StorageDead(_16);
|
||||
StorageDead(_12);
|
||||
StorageDead(_14);
|
||||
StorageDead(_10);
|
||||
StorageDead(_6);
|
||||
_4 = copy _5 as *mut [u8] (Transmute);
|
||||
StorageDead(_5);
|
||||
|
|
|
|||
|
|
@ -9,22 +9,22 @@
|
|||
let mut _4: *mut [u8];
|
||||
let mut _5: std::ptr::NonNull<[u8]>;
|
||||
let mut _6: std::result::Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError>;
|
||||
let mut _7: &std::alloc::Global;
|
||||
let mut _8: std::alloc::Layout;
|
||||
let mut _7: std::alloc::Layout;
|
||||
scope 1 {
|
||||
debug layout => _1;
|
||||
let mut _9: &std::alloc::Global;
|
||||
scope 2 {
|
||||
debug ptr => _3;
|
||||
}
|
||||
scope 5 (inlined <std::alloc::Global as Allocator>::allocate) {
|
||||
scope 6 (inlined std::alloc::Global::alloc_impl) {
|
||||
}
|
||||
}
|
||||
scope 6 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
scope 7 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
}
|
||||
}
|
||||
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
|
||||
let mut _10: isize;
|
||||
let mut _11: !;
|
||||
let mut _8: isize;
|
||||
let mut _9: !;
|
||||
scope 4 {
|
||||
}
|
||||
}
|
||||
|
|
@ -35,10 +35,10 @@
|
|||
StorageLive(_2);
|
||||
- _2 = Option::<Layout>::None;
|
||||
+ _2 = const Option::<Layout>::None;
|
||||
StorageLive(_10);
|
||||
- _10 = discriminant(_2);
|
||||
- switchInt(move _10) -> [0: bb3, 1: bb4, otherwise: bb2];
|
||||
+ _10 = const 0_isize;
|
||||
StorageLive(_8);
|
||||
- _8 = discriminant(_2);
|
||||
- switchInt(move _8) -> [0: bb3, 1: bb4, otherwise: bb2];
|
||||
+ _8 = const 0_isize;
|
||||
+ switchInt(const 0_isize) -> [0: bb3, 1: bb4, otherwise: bb2];
|
||||
}
|
||||
|
||||
|
|
@ -59,30 +59,26 @@
|
|||
}
|
||||
|
||||
bb3: {
|
||||
_11 = option::unwrap_failed() -> unwind continue;
|
||||
_9 = option::unwrap_failed() -> unwind continue;
|
||||
}
|
||||
|
||||
bb4: {
|
||||
- _1 = move ((_2 as Some).0: std::alloc::Layout);
|
||||
+ _1 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
StorageDead(_10);
|
||||
StorageDead(_8);
|
||||
StorageDead(_2);
|
||||
StorageLive(_3);
|
||||
StorageLive(_4);
|
||||
StorageLive(_5);
|
||||
StorageLive(_6);
|
||||
StorageLive(_7);
|
||||
_9 = const main::promoted[0];
|
||||
_7 = copy _9;
|
||||
StorageLive(_8);
|
||||
- _8 = copy _1;
|
||||
- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb5, unwind continue];
|
||||
+ _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
+ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb5, unwind continue];
|
||||
- _7 = copy _1;
|
||||
- _6 = std::alloc::Global::alloc_impl_runtime(move _7, const false) -> [return: bb5, unwind continue];
|
||||
+ _7 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
+ _6 = std::alloc::Global::alloc_impl_runtime(const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb5, unwind continue];
|
||||
}
|
||||
|
||||
bb5: {
|
||||
StorageDead(_8);
|
||||
StorageDead(_7);
|
||||
_5 = Result::<NonNull<[u8]>, std::alloc::AllocError>::unwrap(move _6) -> [return: bb1, unwind continue];
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,22 +9,22 @@
|
|||
let mut _4: *mut [u8];
|
||||
let mut _5: std::ptr::NonNull<[u8]>;
|
||||
let mut _6: std::result::Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError>;
|
||||
let mut _7: &std::alloc::Global;
|
||||
let mut _8: std::alloc::Layout;
|
||||
let mut _7: std::alloc::Layout;
|
||||
scope 1 {
|
||||
debug layout => _1;
|
||||
let mut _9: &std::alloc::Global;
|
||||
scope 2 {
|
||||
debug ptr => _3;
|
||||
}
|
||||
scope 5 (inlined <std::alloc::Global as Allocator>::allocate) {
|
||||
scope 6 (inlined std::alloc::Global::alloc_impl) {
|
||||
}
|
||||
}
|
||||
scope 6 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
scope 7 (inlined NonNull::<[u8]>::as_ptr) {
|
||||
}
|
||||
}
|
||||
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
|
||||
let mut _10: isize;
|
||||
let mut _11: !;
|
||||
let mut _8: isize;
|
||||
let mut _9: !;
|
||||
scope 4 {
|
||||
}
|
||||
}
|
||||
|
|
@ -35,10 +35,10 @@
|
|||
StorageLive(_2);
|
||||
- _2 = Option::<Layout>::None;
|
||||
+ _2 = const Option::<Layout>::None;
|
||||
StorageLive(_10);
|
||||
- _10 = discriminant(_2);
|
||||
- switchInt(move _10) -> [0: bb3, 1: bb4, otherwise: bb2];
|
||||
+ _10 = const 0_isize;
|
||||
StorageLive(_8);
|
||||
- _8 = discriminant(_2);
|
||||
- switchInt(move _8) -> [0: bb3, 1: bb4, otherwise: bb2];
|
||||
+ _8 = const 0_isize;
|
||||
+ switchInt(const 0_isize) -> [0: bb3, 1: bb4, otherwise: bb2];
|
||||
}
|
||||
|
||||
|
|
@ -59,30 +59,26 @@
|
|||
}
|
||||
|
||||
bb3: {
|
||||
_11 = option::unwrap_failed() -> unwind continue;
|
||||
_9 = option::unwrap_failed() -> unwind continue;
|
||||
}
|
||||
|
||||
bb4: {
|
||||
- _1 = move ((_2 as Some).0: std::alloc::Layout);
|
||||
+ _1 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
StorageDead(_10);
|
||||
StorageDead(_8);
|
||||
StorageDead(_2);
|
||||
StorageLive(_3);
|
||||
StorageLive(_4);
|
||||
StorageLive(_5);
|
||||
StorageLive(_6);
|
||||
StorageLive(_7);
|
||||
_9 = const main::promoted[0];
|
||||
_7 = copy _9;
|
||||
StorageLive(_8);
|
||||
- _8 = copy _1;
|
||||
- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb5, unwind continue];
|
||||
+ _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
+ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb5, unwind continue];
|
||||
- _7 = copy _1;
|
||||
- _6 = std::alloc::Global::alloc_impl_runtime(move _7, const false) -> [return: bb5, unwind continue];
|
||||
+ _7 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }};
|
||||
+ _6 = std::alloc::Global::alloc_impl_runtime(const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb5, unwind continue];
|
||||
}
|
||||
|
||||
bb5: {
|
||||
StorageDead(_8);
|
||||
StorageDead(_7);
|
||||
_5 = Result::<NonNull<[u8]>, std::alloc::AllocError>::unwrap(move _6) -> [return: bb1, unwind continue];
|
||||
}
|
||||
|
|
|
|||
5
tests/ui/consts/const-eval/heap/vec-not-made-global.rs
Normal file
5
tests/ui/consts/const-eval/heap/vec-not-made-global.rs
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
#![feature(const_heap)]
|
||||
const V: Vec<i32> = Vec::with_capacity(1);
|
||||
//~^ ERROR: encountered `const_allocate` pointer in final value that was not made global
|
||||
|
||||
fn main() {}
|
||||
10
tests/ui/consts/const-eval/heap/vec-not-made-global.stderr
Normal file
10
tests/ui/consts/const-eval/heap/vec-not-made-global.stderr
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
error: encountered `const_allocate` pointer in final value that was not made global
|
||||
--> $DIR/vec-not-made-global.rs:2:1
|
||||
|
|
||||
LL | const V: Vec<i32> = Vec::with_capacity(1);
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= note: use `const_make_global` to turn allocated pointers into immutable globals before returning
|
||||
|
||||
error: aborting due to 1 previous error
|
||||
|
||||
Loading…
Add table
Add a link
Reference in a new issue