Rollup merge of #143672 - beepster4096:box_drop_flags_again, r=oli-obk

Fix Box allocator drop elaboration

New version of rust-lang/rust#131146.

Clearing Box's drop flag after running its destructor can cause it to skip dropping its allocator, so just don't. It's cleared by the drop ladder code afterwards already.

Unlike the last PR this also handles other types with destructors properly, in the event that we can have open drops on them in the future (by partial initialization or DerefMove or something).

Finally, I also added tests for the interaction with async drop here but I discovered rust-lang/rust#143658, so one of the tests has a `known-bug` annotation. Not sure if it should be in this PR at all though.

Fixes rust-lang/rust#131082

r? wesleywiser - prev. reviewer
This commit is contained in:
Stuart Cook 2025-07-31 15:41:59 +10:00 committed by GitHub
commit f478bec907
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 550 additions and 25 deletions

View file

@ -761,24 +761,37 @@ where
let skip_contents = adt.is_union() || adt.is_manually_drop();
let contents_drop = if skip_contents {
if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
// the top-level drop flag is usually cleared by open_drop_for_adt_contents
// types with destructors would still need an empty drop ladder to clear it
// however, these types are only open dropped in `DropShimElaborator`
// which does not have drop flags
// a future box-like "DerefMove" trait would allow for this case to happen
span_bug!(self.source_info.span, "open dropping partially moved union");
}
(self.succ, self.unwind, self.dropline)
} else {
self.open_drop_for_adt_contents(adt, args)
};
if adt.is_box() {
// we need to drop the inside of the box before running the destructor
let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
let unwind = contents_drop
.1
.map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
let dropline = contents_drop
.2
.map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
if adt.has_dtor(self.tcx()) {
let destructor_block = if adt.is_box() {
// we need to drop the inside of the box before running the destructor
let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
let unwind = contents_drop
.1
.map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
let dropline = contents_drop
.2
.map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
} else {
self.destructor_call_block(contents_drop)
};
self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
} else if adt.has_dtor(self.tcx()) {
self.destructor_call_block(contents_drop)
self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
} else {
contents_drop.0
}
@ -982,12 +995,7 @@ where
unwind.is_cleanup(),
);
let destructor_block = self.elaborator.patch().new_block(result);
let block_start = Location { block: destructor_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
self.drop_flag_test_block(destructor_block, succ, unwind)
self.elaborator.patch().new_block(result)
}
fn destructor_call_block(
@ -1002,13 +1010,7 @@ where
&& !unwind.is_cleanup()
&& ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
{
let destructor_block =
self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);
let block_start = Location { block: destructor_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
self.drop_flag_test_block(destructor_block, succ, unwind)
self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
} else {
self.destructor_call_block_sync((succ, unwind))
}

View file

@ -0,0 +1,186 @@
- // MIR for `main` before ElaborateDrops
+ // MIR for `main` after ElaborateDrops
fn main() -> () {
let mut _0: ();
let _1: std::boxed::Box<HasDrop, DropAllocator>;
let mut _2: HasDrop;
let mut _3: DropAllocator;
let mut _4: bool;
let _5: ();
let mut _6: HasDrop;
let _7: ();
let mut _8: std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _9: bool;
+ let mut _10: &mut std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _11: ();
+ let mut _12: &mut std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _13: ();
+ let mut _14: *const HasDrop;
+ let mut _15: &mut std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _16: ();
+ let mut _17: *const HasDrop;
scope 1 {
debug b => _1;
}
bb0: {
+ _9 = const false;
StorageLive(_1);
StorageLive(_2);
_2 = HasDrop;
StorageLive(_3);
_3 = DropAllocator;
_1 = Box::<HasDrop, DropAllocator>::new_in(move _2, move _3) -> [return: bb1, unwind: bb11];
}
bb1: {
+ _9 = const true;
StorageDead(_3);
StorageDead(_2);
StorageLive(_4);
_4 = const true;
switchInt(move _4) -> [0: bb4, otherwise: bb2];
}
bb2: {
StorageLive(_5);
StorageLive(_6);
_6 = move (*_1);
_5 = std::mem::drop::<HasDrop>(move _6) -> [return: bb3, unwind: bb9];
}
bb3: {
StorageDead(_6);
StorageDead(_5);
_0 = const ();
goto -> bb6;
}
bb4: {
StorageLive(_7);
StorageLive(_8);
+ _9 = const false;
_8 = move _1;
_7 = std::mem::drop::<Box<HasDrop, DropAllocator>>(move _8) -> [return: bb5, unwind: bb8];
}
bb5: {
StorageDead(_8);
StorageDead(_7);
_0 = const ();
goto -> bb6;
}
bb6: {
StorageDead(_4);
- drop(_1) -> [return: bb7, unwind continue];
+ goto -> bb23;
}
bb7: {
+ _9 = const false;
StorageDead(_1);
return;
}
bb8 (cleanup): {
- drop(_8) -> [return: bb10, unwind terminate(cleanup)];
+ goto -> bb10;
}
bb9 (cleanup): {
- drop(_6) -> [return: bb10, unwind terminate(cleanup)];
+ goto -> bb10;
}
bb10 (cleanup): {
- drop(_1) -> [return: bb13, unwind terminate(cleanup)];
+ goto -> bb29;
}
bb11 (cleanup): {
- drop(_3) -> [return: bb12, unwind terminate(cleanup)];
+ goto -> bb12;
}
bb12 (cleanup): {
- drop(_2) -> [return: bb13, unwind terminate(cleanup)];
+ goto -> bb13;
}
bb13 (cleanup): {
resume;
+ }
+
+ bb14: {
+ _9 = const false;
+ goto -> bb7;
+ }
+
+ bb15 (cleanup): {
+ drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
+ }
+
+ bb16 (cleanup): {
+ switchInt(copy _9) -> [0: bb13, otherwise: bb15];
+ }
+
+ bb17: {
+ drop((_1.1: DropAllocator)) -> [return: bb14, unwind: bb13];
+ }
+
+ bb18: {
+ switchInt(copy _9) -> [0: bb14, otherwise: bb17];
+ }
+
+ bb19: {
+ _10 = &mut _1;
+ _11 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _10) -> [return: bb18, unwind: bb16];
+ }
+
+ bb20 (cleanup): {
+ _12 = &mut _1;
+ _13 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _12) -> [return: bb16, unwind terminate(cleanup)];
+ }
+
+ bb21: {
+ goto -> bb19;
+ }
+
+ bb22: {
+ _14 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
+ goto -> bb21;
+ }
+
+ bb23: {
+ switchInt(copy _9) -> [0: bb18, otherwise: bb22];
+ }
+
+ bb24 (cleanup): {
+ drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
+ }
+
+ bb25 (cleanup): {
+ switchInt(copy _9) -> [0: bb13, otherwise: bb24];
+ }
+
+ bb26 (cleanup): {
+ _15 = &mut _1;
+ _16 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _15) -> [return: bb25, unwind terminate(cleanup)];
+ }
+
+ bb27 (cleanup): {
+ goto -> bb26;
+ }
+
+ bb28 (cleanup): {
+ _17 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
+ goto -> bb27;
+ }
+
+ bb29 (cleanup): {
+ switchInt(copy _9) -> [0: bb25, otherwise: bb28];
}
}

View file

@ -0,0 +1,39 @@
// skip-filecheck
//@ test-mir-pass: ElaborateDrops
//@ needs-unwind
#![feature(allocator_api)]
// Regression test for #131082.
// Testing that the allocator of a Box is dropped in conditional drops
use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
// Allocator that delegates all real work to `Global` but has an observable
// `Drop` impl, so drop elaboration must schedule a drop for the Box's
// allocator field (the bug in #131082 was that drop being skipped).
struct DropAllocator;

unsafe impl Allocator for DropAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Delegate actual allocation to the global allocator.
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout);
    }
}

impl Drop for DropAllocator {
    // Intentionally empty: the destructor only needs to *exist* so the
    // allocator is not trivially droppable.
    fn drop(&mut self) {}
}
// Box payload with a (trivial) destructor, so the box *contents* also take
// part in drop elaboration alongside the allocator.
struct HasDrop;
impl Drop for HasDrop {
    fn drop(&mut self) {}
}
// EMIT_MIR box_conditional_drop_allocator.main.ElaborateDrops.diff
fn main() {
    // Box whose allocator itself needs dropping.
    let b = Box::new_in(HasDrop, DropAllocator);
    // Conditional moves out of `b` force drop elaboration to use drop flags:
    // one branch moves out only the contents (allocator must still drop),
    // the other moves the whole box away.
    if true {
        drop(*b);
    } else {
        drop(b);
    }
}

View file

@ -0,0 +1,134 @@
//@ run-pass
//@ check-run-results
// struct `Foo` has both sync and async drop.
// It's used as the allocator of a `Box` which is conditionally moved out of.
// Sync version is called in sync context, async version is called in async function.
#![feature(async_drop, allocator_api)]
#![allow(incomplete_features)]
use std::mem::ManuallyDrop;
//@ edition: 2021
// Logs "<msg> : <handle>" for the expected-output comparison.
// NOTE(review): #[inline(never)] presumably keeps the call outlined so the
// logging isn't perturbed by optimization — TODO confirm.
#[inline(never)]
fn myprintln(msg: &str, my_resource_handle: usize) {
    println!("{} : {}", msg, my_resource_handle);
}
use std::{
future::{Future, async_drop_in_place, AsyncDrop},
pin::{pin, Pin},
sync::{mpsc, Arc},
task::{Context, Poll, Wake, Waker},
alloc::{AllocError, Allocator, Global, Layout},
ptr::NonNull,
};
// Resource-like type used as the Box allocator below; it logs construction
// and (sync or async) destruction so drop behavior is visible in the output.
struct Foo {
    // Identifier echoed in every log line.
    my_resource_handle: usize,
}

impl Foo {
    /// Creates a `Foo` and logs the construction.
    fn new(my_resource_handle: usize) -> Self {
        let out = Foo {
            my_resource_handle,
        };
        myprintln("Foo::new()", my_resource_handle);
        out
    }
}
impl Drop for Foo {
    // Sync destructor: expected to run when Foo is dropped in a sync context.
    fn drop(&mut self) {
        myprintln("Foo::drop()", self.my_resource_handle);
    }
}

impl AsyncDrop for Foo {
    // Async destructor: expected to run *instead of* `Drop` when Foo is
    // dropped inside an async function (see the expected run output).
    async fn drop(self: Pin<&mut Self>) {
        myprintln("Foo::async drop()", self.my_resource_handle);
    }
}
// Foo doubles as a Box allocator; allocation simply delegates to `Global` —
// only Foo's drop behavior is interesting for this test.
unsafe impl Allocator for Foo {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout);
    }
}
// Box payload with a trivial destructor so the contents also participate in
// drop elaboration alongside the allocator.
struct HasDrop;
impl Drop for HasDrop {
    fn drop(&mut self) {}
}
fn main() {
    // Sync context: Foo's synchronous `Drop` impl should run for the allocator.
    {
        let b = Box::new_in(HasDrop, Foo::new(7));
        // Conditionally move out of the box so drop elaboration needs flags.
        if true {
            let _x = *b;
        } else {
            let _y = b;
        }
    }
    println!("Middle");
    // Async context: Foo's `AsyncDrop` impl should run instead.
    block_on(bar(10));
    println!("Done")
}
// Async counterpart of the block in `main`: conditionally moves out of the
// box, so the allocator `Foo` must still be (async-)dropped at scope end.
async fn bar(ident_base: usize) {
    let b = Box::new_in(HasDrop, Foo::new(ident_base));
    if true {
        let _x = *b;
    } else {
        let _y = b;
    }
}
/// Minimal single-threaded executor: polls `fut_unpin` to completion, then
/// runs its async-drop future to completion, and returns the output.
fn block_on<F>(fut_unpin: F) -> F::Output
where
    F: Future,
{
    // ManuallyDrop so the future's ordinary drop glue never runs implicitly;
    // it is dropped explicitly below via `async_drop_in_place`.
    let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin));
    // SAFETY(review): projecting through ManuallyDrop does not move the
    // pinned future, and it is never moved afterwards — confirm.
    let mut fut: Pin<&mut F> = unsafe {
        Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x)
    };
    let (waker, rx) = simple_waker();
    let mut context = Context::from_waker(&waker);
    let rv = loop {
        match fut.as_mut().poll(&mut context) {
            Poll::Ready(out) => break out,
            // expect wake in polls
            Poll::Pending => rx.try_recv().unwrap(),
        }
    };
    // Explicitly run the future's async drop glue to completion.
    let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) };
    let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin);
    loop {
        match drop_fut.as_mut().poll(&mut context) {
            Poll::Ready(()) => break,
            Poll::Pending => rx.try_recv().unwrap(),
        }
    }
    rv
}
/// Builds a waker that reports each wake-up over an mpsc channel.
///
/// Returns the waker plus the receiving end; the poll loop drains the
/// receiver to confirm a wake actually happened before polling again.
fn simple_waker() -> (Waker, mpsc::Receiver<()>) {
    // Local helper type: waking it pushes one unit message into the channel.
    struct ChannelWaker {
        tx: mpsc::Sender<()>,
    }
    impl Wake for ChannelWaker {
        fn wake(self: Arc<Self>) {
            // The receiver outlives every poll, so sending cannot fail here.
            self.tx.send(()).unwrap();
        }
    }
    let (tx, rx) = mpsc::channel();
    let waker = Waker::from(Arc::new(ChannelWaker { tx }));
    (waker, rx)
}

View file

@ -0,0 +1,6 @@
Foo::new() : 7
Foo::drop() : 7
Middle
Foo::new() : 10
Foo::async drop() : 10
Done

View file

@ -0,0 +1,109 @@
//@ run-pass
//@ check-run-results
// struct `Foo` has both sync and async drop.
// `Foo` is always inside `Box`
// Sync version is called in sync context, async version is called in async function.
//@ known-bug: #143658
// async version is never actually called
#![feature(async_drop)]
#![allow(incomplete_features)]
use std::mem::ManuallyDrop;
//@ edition: 2021
// Logs "<msg> : <handle>" for the expected-output comparison.
// NOTE(review): #[inline(never)] presumably keeps the call outlined so the
// logging isn't perturbed by optimization — TODO confirm.
#[inline(never)]
fn myprintln(msg: &str, my_resource_handle: usize) {
    println!("{} : {}", msg, my_resource_handle);
}
use std::{
future::{Future, async_drop_in_place, AsyncDrop},
pin::{pin, Pin},
sync::{mpsc, Arc},
task::{Context, Poll, Wake, Waker},
};
// Resource-like type that logs construction and (sync or async) destruction,
// so which destructor ran is visible in the program output.
struct Foo {
    // Identifier echoed in every log line.
    my_resource_handle: usize,
}

impl Foo {
    /// Creates a `Foo` and logs the construction.
    fn new(my_resource_handle: usize) -> Self {
        let out = Foo {
            my_resource_handle,
        };
        myprintln("Foo::new()", my_resource_handle);
        out
    }
}
impl Drop for Foo {
    // Sync destructor: expected in sync contexts.
    fn drop(&mut self) {
        myprintln("Foo::drop()", self.my_resource_handle);
    }
}

impl AsyncDrop for Foo {
    // Async destructor: should run in async contexts, but see the
    // known-bug note (#143658) — currently the sync one runs for Box<Foo>.
    async fn drop(self: Pin<&mut Self>) {
        myprintln("Foo::async drop()", self.my_resource_handle);
    }
}
fn main() {
    // Sync context: Box<Foo>'s drop should run Foo's sync destructor.
    {
        let _ = Box::new(Foo::new(7));
    }
    println!("Middle");
    // Async context: Foo's async destructor *should* run, but currently the
    // sync one runs instead — known-bug #143658 (see the expected output).
    block_on(bar(10));
    println!("Done")
}
// Drops a boxed Foo at the end of an async fn; the async destructor should
// be used here (known-bug #143658: currently it is not).
async fn bar(ident_base: usize) {
    let _first = Box::new(Foo::new(ident_base));
}
/// Minimal single-threaded executor: polls `fut_unpin` to completion, then
/// runs its async-drop future to completion, and returns the output.
fn block_on<F>(fut_unpin: F) -> F::Output
where
    F: Future,
{
    // ManuallyDrop so the future's ordinary drop glue never runs implicitly;
    // it is dropped explicitly below via `async_drop_in_place`.
    let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin));
    // SAFETY(review): projecting through ManuallyDrop does not move the
    // pinned future, and it is never moved afterwards — confirm.
    let mut fut: Pin<&mut F> = unsafe {
        Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x)
    };
    let (waker, rx) = simple_waker();
    let mut context = Context::from_waker(&waker);
    let rv = loop {
        match fut.as_mut().poll(&mut context) {
            Poll::Ready(out) => break out,
            // expect wake in polls
            Poll::Pending => rx.try_recv().unwrap(),
        }
    };
    // Explicitly run the future's async drop glue to completion.
    let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) };
    let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin);
    loop {
        match drop_fut.as_mut().poll(&mut context) {
            Poll::Ready(()) => break,
            Poll::Pending => rx.try_recv().unwrap(),
        }
    }
    rv
}
/// Builds a waker that reports each wake-up over an mpsc channel.
///
/// Returns the waker plus the receiving end; the poll loop drains the
/// receiver to confirm a wake actually happened before polling again.
fn simple_waker() -> (Waker, mpsc::Receiver<()>) {
    // Local helper type: waking it pushes one unit message into the channel.
    struct ChannelWaker {
        tx: mpsc::Sender<()>,
    }
    impl Wake for ChannelWaker {
        fn wake(self: Arc<Self>) {
            // The receiver outlives every poll, so sending cannot fail here.
            self.tx.send(()).unwrap();
        }
    }
    let (tx, rx) = mpsc::channel();
    let waker = Waker::from(Arc::new(ChannelWaker { tx }));
    (waker, rx)
}

View file

@ -0,0 +1,6 @@
Foo::new() : 7
Foo::drop() : 7
Middle
Foo::new() : 10
Foo::drop() : 10
Done

View file

@ -0,0 +1,43 @@
//@ run-pass
#![feature(allocator_api)]
// Regression test for #131082.
// Testing that the allocator of a Box is dropped in conditional drops
use std::alloc::{AllocError, Allocator, Global, Layout};
use std::cell::Cell;
use std::ptr::NonNull;
// Allocator delegating to `Global` that records in a shared Cell whether its
// destructor ran, so the test can assert the Box's allocator was dropped.
struct DropCheckingAllocator<'a>(&'a Cell<bool>);

unsafe impl Allocator for DropCheckingAllocator<'_> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Delegate actual allocation to the global allocator.
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout);
    }
}

impl Drop for DropCheckingAllocator<'_> {
    fn drop(&mut self) {
        // Flag that the allocator's destructor actually ran.
        self.0.set(true);
    }
}
// Box payload with a trivial destructor so the contents also participate in
// drop elaboration alongside the allocator.
struct HasDrop;
impl Drop for HasDrop {
    fn drop(&mut self) {}
}
fn main() {
    let dropped = Cell::new(false);
    {
        let b = Box::new_in(HasDrop, DropCheckingAllocator(&dropped));
        // Conditional moves force drop flags; the regression in #131082 was
        // the allocator drop being skipped on the moved-out-contents path.
        if true {
            drop(*b);
        } else {
            drop(b);
        }
    }
    // The allocator must have been dropped regardless of which branch ran.
    assert!(dropped.get());
}