Auto merge of #147893 - fee1-dead-contrib:constheapheapheap, r=oli-obk

`Vec::push` in consts MVP

Example:

```rust
const X: &'static [u32] = {
    let mut v = Vec::with_capacity(6);
    let mut x = 1;
    while x < 42 {
        v.push(x);
        x *= 2;
    }
    assert!(v.len() == 6);
    v.const_make_global()
};

assert_eq!([1, 2, 4, 8, 16, 32], X);
```

Oh this is fun...

* We split out the implementation of `Global` such that it calls `intrinsics::const_allocate` and `intrinsics::const_deallocate` during compile time. This is achieved using `const_eval_select`
* This allows us to `impl const Allocator for Global`
* We then constify everything necessary for `Vec::with_capacity` and `Vec::push`.
* Added `Vec::const_make_global` to leak and intern the final value via `intrinsics::const_make_global`. If we see any pointer in the final value of a `const` that did not call `const_make_global`, we error as implemented in rust-lang/rust#143595.

r? `@rust-lang/wg-const-eval`

To-do for me:
* [x] Assess the rustdoc impact of additional bounds in the method
* [x] ~~Increase test coverage~~ I think this is enough for an unstable feature.
This commit is contained in:
bors 2026-01-06 11:39:17 +00:00
commit 74fd7516da
19 changed files with 669 additions and 421 deletions

View file

@ -5,8 +5,8 @@
#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;
use core::hint;
use core::ptr::{self, NonNull};
use core::{cmp, hint};
unsafe extern "Rust" {
// These are the magic symbols to call the global allocator. rustc generates
@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
impl Global {
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
match layout.size() {
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
// SAFETY: `layout` is non-zero in size,
@ -194,10 +194,26 @@ impl Global {
}
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY:
// * We have checked that `layout` is non-zero in size.
// * The caller is obligated to provide a layout that "fits", and in this case,
// "fit" always means a layout that is equal to the original, because our
// `allocate()`, `grow()`, and `shrink()` implementations never return a larger
// allocation than requested.
// * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
// safety documentation.
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}
// SAFETY: Same as `Allocator::grow`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow_impl(
fn grow_impl_runtime(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
@ -241,69 +257,16 @@ impl Global {
},
}
}
}
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Allocator for Global {
// SAFETY: Same as `Allocator::grow`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, false)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, true)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY:
// * We have checked that `layout` is non-zero in size.
// * The caller is obligated to provide a layout that "fits", and in this case,
// "fit" always means a layout that is equal to the original, because our
// `allocate()`, `grow()`, and `shrink()` implementations never return a larger
// allocation than requested.
// * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
// safety documentation.
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow_zeroed(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn shrink(
fn shrink_impl_runtime(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
_zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
debug_assert!(
new_layout.size() <= old_layout.size(),
@ -340,6 +303,180 @@ unsafe impl Allocator for Global {
},
}
}
// SAFETY: Same as `Allocator::allocate`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
core::intrinsics::const_eval_select(
(layout, zeroed),
Global::alloc_impl_const,
Global::alloc_impl_runtime,
)
}
// SAFETY: Same as `Allocator::deallocate`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
core::intrinsics::const_eval_select(
(ptr, layout),
Global::deallocate_impl_const,
Global::deallocate_impl_runtime,
)
}
// SAFETY: Same as `Allocator::grow`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn grow_impl(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
core::intrinsics::const_eval_select(
(self, ptr, old_layout, new_layout, zeroed),
Global::grow_shrink_impl_const,
Global::grow_impl_runtime,
)
}
// SAFETY: Same as `Allocator::shrink`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn shrink_impl(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
core::intrinsics::const_eval_select(
(self, ptr, old_layout, new_layout, false),
Global::grow_shrink_impl_const,
Global::shrink_impl_runtime,
)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
match layout.size() {
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
// SAFETY: `layout` is non-zero in size,
size => unsafe {
let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
if zeroed {
// SAFETY: the pointer returned by `const_allocate` is valid to write to.
ptr.write_bytes(0, size);
}
Ok(NonNull::slice_from_raw_parts(ptr, size))
},
}
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
unsafe {
core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
}
}
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn grow_shrink_impl_const(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
let new_ptr = self.alloc_impl(new_layout, zeroed)?;
// SAFETY: both pointers are valid and this operation is in bounds.
unsafe {
ptr::copy_nonoverlapping(
ptr.as_ptr(),
new_ptr.as_mut_ptr(),
cmp::min(old_layout.size(), new_layout.size()),
);
}
unsafe {
self.deallocate_impl(ptr, old_layout);
}
Ok(new_ptr)
}
}
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
unsafe impl const Allocator for Global {
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, false)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, true)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.deallocate_impl(ptr, layout) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow_zeroed(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn shrink(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
}
}
/// The allocator for `Box`.

View file

@ -84,13 +84,14 @@ impl TryReserveError {
reason = "Uncertain how much info should be exposed",
issue = "48043"
)]
pub fn kind(&self) -> TryReserveErrorKind {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn kind(&self) -> TryReserveErrorKind {
self.kind.clone()
}
}
/// Details of the allocation that caused a `TryReserveError`
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(PartialEq, Eq, Debug)]
#[unstable(
feature = "try_reserve_kind",
reason = "Uncertain how much info should be exposed",
@ -120,6 +121,24 @@ pub enum TryReserveErrorKind {
},
}
#[unstable(
feature = "try_reserve_kind",
reason = "Uncertain how much info should be exposed",
issue = "48043"
)]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[cfg(not(test))]
impl const Clone for TryReserveErrorKind {
fn clone(&self) -> Self {
match self {
TryReserveErrorKind::CapacityOverflow => TryReserveErrorKind::CapacityOverflow,
TryReserveErrorKind::AllocError { layout, non_exhaustive: () } => {
TryReserveErrorKind::AllocError { layout: *layout, non_exhaustive: () }
}
}
}
}
#[cfg(test)]
pub use realalloc::collections::TryReserveErrorKind;

View file

@ -101,10 +101,15 @@
#![feature(char_internals)]
#![feature(clone_to_uninit)]
#![feature(coerce_unsized)]
#![feature(const_clone)]
#![feature(const_cmp)]
#![feature(const_convert)]
#![feature(const_default)]
#![feature(const_destruct)]
#![feature(const_eval_select)]
#![feature(const_heap)]
#![feature(const_option_ops)]
#![feature(const_try)]
#![feature(copied_into_inner)]
#![feature(core_intrinsics)]
#![feature(deprecated_suggestion)]
@ -119,6 +124,7 @@
#![feature(fmt_internals)]
#![feature(fn_traits)]
#![feature(formatting_options)]
#![feature(freeze)]
#![feature(generic_atomic)]
#![feature(hasher_prefixfree_extras)]
#![feature(inplace_iteration)]
@ -172,6 +178,7 @@
#![feature(const_trait_impl)]
#![feature(coroutine_trait)]
#![feature(decl_macro)]
#![feature(derive_const)]
#![feature(dropck_eyepatch)]
#![feature(fundamental)]
#![feature(hashmap_internals)]

View file

@ -4,7 +4,7 @@
// Note: This module is also included in the alloctests crate using #[path] to
// run the tests. See the comment there for an explanation why this is the case.
use core::marker::PhantomData;
use core::marker::{Destruct, PhantomData};
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ptr::{self, Alignment, NonNull, Unique};
use core::{cmp, hint};
@ -24,7 +24,7 @@ mod tests;
// only one location which panics rather than a bunch throughout the module.
#[cfg(not(no_global_oom_handling))]
#[cfg_attr(not(panic = "immediate-abort"), inline(never))]
fn capacity_overflow() -> ! {
const fn capacity_overflow() -> ! {
panic!("capacity overflow");
}
@ -165,6 +165,30 @@ const fn min_non_zero_cap(size: usize) -> usize {
}
}
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
const impl<T, A: [const] Allocator + [const] Destruct> RawVec<T, A> {
/// Like `with_capacity`, but parameterized over the choice of
/// allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
Self {
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
_marker: PhantomData,
}
}
/// A specialized version of `self.reserve(len, 1)` which requires the
/// caller to ensure `len == self.capacity()`.
#[cfg(not(no_global_oom_handling))]
#[inline(never)]
pub(crate) fn grow_one(&mut self) {
// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
unsafe { self.inner.grow_one(T::LAYOUT) }
}
}
impl<T, A: Allocator> RawVec<T, A> {
#[cfg(not(no_global_oom_handling))]
pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::<T>());
@ -178,17 +202,6 @@ impl<T, A: Allocator> RawVec<T, A> {
Self { inner: RawVecInner::new_in(alloc, Alignment::of::<T>()), _marker: PhantomData }
}
/// Like `with_capacity`, but parameterized over the choice of
/// allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
Self {
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
_marker: PhantomData,
}
}
/// Like `try_with_capacity`, but parameterized over the choice of
/// allocator for the returned `RawVec`.
#[inline]
@ -327,15 +340,6 @@ impl<T, A: Allocator> RawVec<T, A> {
unsafe { self.inner.reserve(len, additional, T::LAYOUT) }
}
/// A specialized version of `self.reserve(len, 1)` which requires the
/// caller to ensure `len == self.capacity()`.
#[cfg(not(no_global_oom_handling))]
#[inline(never)]
pub(crate) fn grow_one(&mut self) {
// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
unsafe { self.inner.grow_one(T::LAYOUT) }
}
/// The same as `reserve`, but returns on errors instead of panicking or aborting.
pub(crate) fn try_reserve(
&mut self,
@ -405,14 +409,9 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
}
}
impl<A: Allocator> RawVecInner<A> {
#[inline]
const fn new_in(alloc: A, align: Alignment) -> Self {
let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero()));
// `cap: 0` means "unallocated". zero-sized types are ignored.
Self { ptr, cap: ZERO_CAP, alloc }
}
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
const impl<A: [const] Allocator + [const] Destruct> RawVecInner<A> {
#[cfg(not(no_global_oom_handling))]
#[inline]
fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
@ -428,24 +427,6 @@ impl<A: Allocator> RawVecInner<A> {
}
}
#[inline]
fn try_with_capacity_in(
capacity: usize,
alloc: A,
elem_layout: Layout,
) -> Result<Self, TryReserveError> {
Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout)
}
#[cfg(not(no_global_oom_handling))]
#[inline]
fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
Ok(res) => res,
Err(err) => handle_error(err),
}
}
fn try_allocate_in(
capacity: usize,
init: AllocInit,
@ -484,6 +465,118 @@ impl<A: Allocator> RawVecInner<A> {
})
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
#[cfg(not(no_global_oom_handling))]
#[inline]
unsafe fn grow_one(&mut self, elem_layout: Layout) {
// SAFETY: Precondition passed to caller
if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
handle_error(err);
}
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
/// - The sum of `len` and `additional` must be greater than the current capacity
unsafe fn grow_amortized(
&mut self,
len: usize,
additional: usize,
elem_layout: Layout,
) -> Result<(), TryReserveError> {
// This is ensured by the calling contexts.
debug_assert!(additional > 0);
if elem_layout.size() == 0 {
// Since we return a capacity of `usize::MAX` when `elem_size` is
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow.into());
}
// Nothing we can really do about these checks, sadly.
let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
// This guarantees exponential growth. The doubling cannot overflow
// because `cap <= isize::MAX` and the type of `cap` is `usize`.
let cap = cmp::max(self.cap.as_inner() * 2, required_cap);
let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);
// SAFETY:
// - cap >= len + additional
// - other preconditions passed to caller
let ptr = unsafe { self.finish_grow(cap, elem_layout)? };
// SAFETY: `finish_grow` would have failed if `cap > isize::MAX`
unsafe { self.set_ptr_and_cap(ptr, cap) };
Ok(())
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
/// - `cap` must be greater than the current capacity
// not marked inline(never) since we want optimizers to be able to observe the specifics of this
// function, see tests/codegen-llvm/vec-reserve-extend.rs.
#[cold]
unsafe fn finish_grow(
&self,
cap: usize,
elem_layout: Layout,
) -> Result<NonNull<[u8]>, TryReserveError> {
let new_layout = layout_array(cap, elem_layout)?;
let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
// FIXME(const-hack): switch to `debug_assert_eq`
debug_assert!(old_layout.align() == new_layout.align());
unsafe {
// The allocator checks for alignment equality
hint::assert_unchecked(old_layout.align() == new_layout.align());
self.alloc.grow(ptr, old_layout, new_layout)
}
} else {
self.alloc.allocate(new_layout)
};
// FIXME(const-hack): switch back to `map_err`
match memory {
Ok(memory) => Ok(memory),
Err(_) => Err(AllocError { layout: new_layout, non_exhaustive: () }.into()),
}
}
}
impl<A: Allocator> RawVecInner<A> {
#[inline]
const fn new_in(alloc: A, align: Alignment) -> Self {
let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero()));
// `cap: 0` means "unallocated". zero-sized types are ignored.
Self { ptr, cap: ZERO_CAP, alloc }
}
#[inline]
fn try_with_capacity_in(
capacity: usize,
alloc: A,
elem_layout: Layout,
) -> Result<Self, TryReserveError> {
Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout)
}
#[cfg(not(no_global_oom_handling))]
#[inline]
fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
Ok(res) => res,
Err(err) => handle_error(err),
}
}
#[inline]
unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self {
Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc }
@ -519,7 +612,8 @@ impl<A: Allocator> RawVecInner<A> {
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
#[inline]
unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
None
} else {
@ -566,19 +660,6 @@ impl<A: Allocator> RawVecInner<A> {
}
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
#[cfg(not(no_global_oom_handling))]
#[inline]
unsafe fn grow_one(&mut self, elem_layout: Layout) {
// SAFETY: Precondition passed to caller
if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
handle_error(err);
}
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
@ -651,12 +732,13 @@ impl<A: Allocator> RawVecInner<A> {
}
#[inline]
fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
const fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
additional > self.capacity(elem_layout.size()).wrapping_sub(len)
}
#[inline]
unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
// Allocators currently return a `NonNull<[u8]>` whose length matches
// the size requested. If that ever changes, the capacity here should
// change to `ptr.len() / size_of::<T>()`.
@ -664,44 +746,6 @@ impl<A: Allocator> RawVecInner<A> {
self.cap = unsafe { Cap::new_unchecked(cap) };
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
/// - The sum of `len` and `additional` must be greater than the current capacity
unsafe fn grow_amortized(
&mut self,
len: usize,
additional: usize,
elem_layout: Layout,
) -> Result<(), TryReserveError> {
// This is ensured by the calling contexts.
debug_assert!(additional > 0);
if elem_layout.size() == 0 {
// Since we return a capacity of `usize::MAX` when `elem_size` is
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow.into());
}
// Nothing we can really do about these checks, sadly.
let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
// This guarantees exponential growth. The doubling cannot overflow
// because `cap <= isize::MAX` and the type of `cap` is `usize`.
let cap = cmp::max(self.cap.as_inner() * 2, required_cap);
let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);
// SAFETY:
// - cap >= len + additional
// - other preconditions passed to caller
let ptr = unsafe { self.finish_grow(cap, elem_layout)? };
// SAFETY: `finish_grow` would have failed if `cap > isize::MAX`
unsafe { self.set_ptr_and_cap(ptr, cap) };
Ok(())
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
@ -729,35 +773,6 @@ impl<A: Allocator> RawVecInner<A> {
Ok(())
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
/// - `cap` must be greater than the current capacity
// not marked inline(never) since we want optimizers to be able to observe the specifics of this
// function, see tests/codegen-llvm/vec-reserve-extend.rs.
#[cold]
unsafe fn finish_grow(
&self,
cap: usize,
elem_layout: Layout,
) -> Result<NonNull<[u8]>, TryReserveError> {
let new_layout = layout_array(cap, elem_layout)?;
let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
debug_assert_eq!(old_layout.align(), new_layout.align());
unsafe {
// The allocator checks for alignment equality
hint::assert_unchecked(old_layout.align() == new_layout.align());
self.alloc.grow(ptr, old_layout, new_layout)
}
} else {
self.alloc.allocate(new_layout)
};
memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
}
/// # Safety
/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
/// initially construct `self`
@ -839,7 +854,8 @@ impl<A: Allocator> RawVecInner<A> {
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
fn handle_error(e: TryReserveError) -> ! {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn handle_error(e: TryReserveError) -> ! {
match e.kind() {
CapacityOverflow => capacity_overflow(),
AllocError { layout, .. } => handle_alloc_error(layout),
@ -847,6 +863,11 @@ fn handle_error(e: TryReserveError) -> ! {
}
#[inline]
fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into())
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
// FIXME(const-hack) return to using `map` and `map_err` once `const_closures` is implemented
match elem_layout.repeat(cap) {
Ok((layout, _pad)) => Ok(layout),
Err(_) => Err(CapacityOverflow.into()),
}
}

View file

@ -81,7 +81,9 @@ use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::PhantomData;
#[cfg(not(no_global_oom_handling))]
use core::marker::Destruct;
use core::marker::{Freeze, PhantomData};
use core::mem::{self, Assume, ManuallyDrop, MaybeUninit, SizedTypeProperties, TransmuteFrom};
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
@ -519,7 +521,8 @@ impl<T> Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[rustc_diagnostic_item = "vec_with_capacity"]
pub fn with_capacity(capacity: usize) -> Self {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn with_capacity(capacity: usize) -> Self {
Self::with_capacity_in(capacity, Global)
}
@ -881,29 +884,28 @@ impl<T> Vec<T> {
// SAFETY: A `Vec` always has a non-null pointer.
(unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
}
/// Interns the `Vec<T>`, making the underlying memory read-only. This method should be
/// called during compile time. (This is a no-op if called during runtime)
///
/// This method must be called if the memory used by `Vec` needs to appear in the final
/// values of constants.
#[unstable(feature = "const_heap", issue = "79597")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn const_make_global(mut self) -> &'static [T]
where
T: Freeze,
{
unsafe { core::intrinsics::const_make_global(self.as_mut_ptr().cast()) };
let me = ManuallyDrop::new(self);
unsafe { slice::from_raw_parts(me.as_ptr(), me.len) }
}
}
impl<T, A: Allocator> Vec<T, A> {
/// Constructs a new, empty `Vec<T, A>`.
///
/// The vector will not allocate until elements are pushed onto it.
///
/// # Examples
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::System;
///
/// # #[allow(unused_mut)]
/// let mut vec: Vec<i32, _> = Vec::new_in(System);
/// ```
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub const fn new_in(alloc: A) -> Self {
Vec { buf: RawVec::new_in(alloc), len: 0 }
}
#[cfg(not(no_global_oom_handling))]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
const impl<T, A: [const] Allocator + [const] Destruct> Vec<T, A> {
/// Constructs a new, empty `Vec<T, A>` with at least the specified capacity
/// with the provided allocator.
///
@ -959,13 +961,109 @@ impl<T, A: Allocator> Vec<T, A> {
/// let vec_units = Vec::<(), System>::with_capacity_in(10, System);
/// assert_eq!(vec_units.capacity(), usize::MAX);
/// ```
#[cfg(not(no_global_oom_handling))]
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
}
/// Appends an element to the back of a collection.
///
/// # Panics
///
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
///
/// # Examples
///
/// ```
/// let mut vec = vec![1, 2];
/// vec.push(3);
/// assert_eq!(vec, [1, 2, 3]);
/// ```
///
/// # Time complexity
///
/// Takes amortized *O*(1) time. If the vector's length would exceed its
/// capacity after the push, *O*(*capacity*) time is taken to copy the
/// vector's elements to a larger allocation. This expensive operation is
/// offset by the *capacity* *O*(1) insertions it allows.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push_back", "put", "append")]
pub fn push(&mut self, value: T) {
let _ = self.push_mut(value);
}
/// Appends an element to the back of a collection, returning a reference to it.
///
/// # Panics
///
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
///
/// # Examples
///
/// ```
/// #![feature(push_mut)]
///
///
/// let mut vec = vec![1, 2];
/// let last = vec.push_mut(3);
/// assert_eq!(*last, 3);
/// assert_eq!(vec, [1, 2, 3]);
///
/// let last = vec.push_mut(3);
/// *last += 1;
/// assert_eq!(vec, [1, 2, 3, 4]);
/// ```
///
/// # Time complexity
///
/// Takes amortized *O*(1) time. If the vector's length would exceed its
/// capacity after the push, *O*(*capacity*) time is taken to copy the
/// vector's elements to a larger allocation. This expensive operation is
/// offset by the *capacity* *O*(1) insertions it allows.
#[inline]
#[unstable(feature = "push_mut", issue = "135974")]
#[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
pub fn push_mut(&mut self, value: T) -> &mut T {
// Inform codegen that the length does not change across grow_one().
let len = self.len;
// This will panic or abort if we would allocate > isize::MAX bytes
// or if the length increment would overflow for zero-sized types.
if len == self.buf.capacity() {
self.buf.grow_one();
}
unsafe {
let end = self.as_mut_ptr().add(len);
ptr::write(end, value);
self.len = len + 1;
// SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference.
&mut *end
}
}
}
impl<T, A: Allocator> Vec<T, A> {
/// Constructs a new, empty `Vec<T, A>`.
///
/// The vector will not allocate until elements are pushed onto it.
///
/// # Examples
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::System;
///
/// # #[allow(unused_mut)]
/// let mut vec: Vec<i32, _> = Vec::new_in(System);
/// ```
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub const fn new_in(alloc: A) -> Self {
Vec { buf: RawVec::new_in(alloc), len: 0 }
}
/// Constructs a new, empty `Vec<T, A>` with at least the specified capacity
/// with the provided allocator.
///
@ -2546,34 +2644,6 @@ impl<T, A: Allocator> Vec<T, A> {
}
}
/// Appends an element to the back of a collection.
///
/// # Panics
///
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
///
/// # Examples
///
/// ```
/// let mut vec = vec![1, 2];
/// vec.push(3);
/// assert_eq!(vec, [1, 2, 3]);
/// ```
///
/// # Time complexity
///
/// Takes amortized *O*(1) time. If the vector's length would exceed its
/// capacity after the push, *O*(*capacity*) time is taken to copy the
/// vector's elements to a larger allocation. This expensive operation is
/// offset by the *capacity* *O*(1) insertions it allows.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push_back", "put", "append")]
pub fn push(&mut self, value: T) {
let _ = self.push_mut(value);
}
/// Appends an element and returns a reference to it if there is sufficient spare capacity,
/// otherwise an error is returned with the element.
///
@ -2627,55 +2697,6 @@ impl<T, A: Allocator> Vec<T, A> {
}
}
/// Appends an element to the back of a collection, returning a reference to it.
///
/// # Panics
///
/// Panics if the new capacity exceeds `isize::MAX` _bytes_.
///
/// # Examples
///
/// ```
/// #![feature(push_mut)]
///
///
/// let mut vec = vec![1, 2];
/// let last = vec.push_mut(3);
/// assert_eq!(*last, 3);
/// assert_eq!(vec, [1, 2, 3]);
///
/// let last = vec.push_mut(3);
/// *last += 1;
/// assert_eq!(vec, [1, 2, 3, 4]);
/// ```
///
/// # Time complexity
///
/// Takes amortized *O*(1) time. If the vector's length would exceed its
/// capacity after the push, *O*(*capacity*) time is taken to copy the
/// vector's elements to a larger allocation. This expensive operation is
/// offset by the *capacity* *O*(1) insertions it allows.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[unstable(feature = "push_mut", issue = "135974")]
#[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
pub fn push_mut(&mut self, value: T) -> &mut T {
// Inform codegen that the length does not change across grow_one().
let len = self.len;
// This will panic or abort if we would allocate > isize::MAX bytes
// or if the length increment would overflow for zero-sized types.
if len == self.buf.capacity() {
self.buf.grow_one();
}
unsafe {
let end = self.as_mut_ptr().add(len);
ptr::write(end, value);
self.len = len + 1;
// SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference.
&mut *end
}
}
/// Removes the last element from a vector and returns it, or [`None`] if it
/// is empty.
///