Fix Box allocator drop elaboration #143672

Open · wants to merge 2 commits into base: master
55 changes: 28 additions & 27 deletions compiler/rustc_mir_transform/src/elaborate_drop.rs
@@ -761,24 +761,36 @@ where

let skip_contents = adt.is_union() || adt.is_manually_drop();
let contents_drop = if skip_contents {
(self.succ, self.unwind, self.dropline)
if adt.has_dtor(self.tcx()) {
// The top-level drop flag is usually cleared by `open_drop_for_adt_contents`,
// but types with destructors still need an empty drop ladder to clear it.

// Currently no Rust types can trigger this path in a context where drop flags exist;
// however, a future box-like `DerefMove` trait would allow it.
self.drop_ladder_bottom()
} else {
(self.succ, self.unwind, self.dropline)
}
} else {
self.open_drop_for_adt_contents(adt, args)
};

if adt.is_box() {
// we need to drop the inside of the box before running the destructor
let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
let unwind = contents_drop
.1
.map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
let dropline = contents_drop
.2
.map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));

self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
} else if adt.has_dtor(self.tcx()) {
self.destructor_call_block(contents_drop)
if adt.has_dtor(self.tcx()) {
let destructor_block = if adt.is_box() {
// we need to drop the inside of the box before running the destructor
let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
let unwind = contents_drop
.1
.map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
let dropline = contents_drop
.2
.map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
} else {
self.destructor_call_block(contents_drop)
};

self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
} else {
contents_drop.0
}
@@ -982,12 +994,7 @@ where
unwind.is_cleanup(),
);

let destructor_block = self.elaborator.patch().new_block(result);

let block_start = Location { block: destructor_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

self.drop_flag_test_block(destructor_block, succ, unwind)
self.elaborator.patch().new_block(result)
}

fn destructor_call_block(
@@ -1002,13 +1009,7 @@ where
&& !unwind.is_cleanup()
&& ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
{
let destructor_block =
self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);

let block_start = Location { block: destructor_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

self.drop_flag_test_block(destructor_block, succ, unwind)
self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
} else {
self.destructor_call_block_sync((succ, unwind))
}
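For context, the restructured branch above preserves the invariant stated in the comment: the box's contents are dropped before the box destructor runs, and the allocator field is dropped after it. Below is a minimal user-level sketch (not part of this PR) that makes that order observable, assuming the unstable `allocator_api` feature; the `LoggingAllocator` and `Payload` names are purely illustrative.

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
use std::sync::Mutex;

// Records drop-related events so the order can be asserted at the end.
static EVENTS: Mutex<Vec<&'static str>> = Mutex::new(Vec::new());

struct LoggingAllocator;

unsafe impl Allocator for LoggingAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        EVENTS.lock().unwrap().push("deallocate");
        Global.deallocate(ptr, layout);
    }
}

impl Drop for LoggingAllocator {
    fn drop(&mut self) {
        EVENTS.lock().unwrap().push("allocator dropped");
    }
}

// Non-zero-sized so the box actually allocates and deallocates.
struct Payload(#[allow(dead_code)] u8);

impl Drop for Payload {
    fn drop(&mut self) {
        EVENTS.lock().unwrap().push("payload dropped");
    }
}

fn main() {
    drop(Box::new_in(Payload(0), LoggingAllocator));
    // Contents first, then the box destructor frees the allocation,
    // then the allocator value itself is dropped.
    assert_eq!(
        *EVENTS.lock().unwrap(),
        ["payload dropped", "deallocate", "allocator dropped"]
    );
}
```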
186 changes: 186 additions & 0 deletions tests/mir-opt/box_conditional_drop_allocator.main.ElaborateDrops.diff
@@ -0,0 +1,186 @@
- // MIR for `main` before ElaborateDrops
+ // MIR for `main` after ElaborateDrops

fn main() -> () {
let mut _0: ();
let _1: std::boxed::Box<HasDrop, DropAllocator>;
let mut _2: HasDrop;
let mut _3: DropAllocator;
let mut _4: bool;
let _5: ();
let mut _6: HasDrop;
let _7: ();
let mut _8: std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _9: bool;
+ let mut _10: &mut std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _11: ();
+ let mut _12: &mut std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _13: ();
+ let mut _14: *const HasDrop;
+ let mut _15: &mut std::boxed::Box<HasDrop, DropAllocator>;
+ let mut _16: ();
+ let mut _17: *const HasDrop;
scope 1 {
debug b => _1;
}

bb0: {
+ _9 = const false;
StorageLive(_1);
StorageLive(_2);
_2 = HasDrop;
StorageLive(_3);
_3 = DropAllocator;
_1 = Box::<HasDrop, DropAllocator>::new_in(move _2, move _3) -> [return: bb1, unwind: bb11];
}

bb1: {
+ _9 = const true;
StorageDead(_3);
StorageDead(_2);
StorageLive(_4);
_4 = const true;
switchInt(move _4) -> [0: bb4, otherwise: bb2];
}

bb2: {
StorageLive(_5);
StorageLive(_6);
_6 = move (*_1);
_5 = std::mem::drop::<HasDrop>(move _6) -> [return: bb3, unwind: bb9];
}

bb3: {
StorageDead(_6);
StorageDead(_5);
_0 = const ();
goto -> bb6;
}

bb4: {
StorageLive(_7);
StorageLive(_8);
+ _9 = const false;
_8 = move _1;
_7 = std::mem::drop::<Box<HasDrop, DropAllocator>>(move _8) -> [return: bb5, unwind: bb8];
}

bb5: {
StorageDead(_8);
StorageDead(_7);
_0 = const ();
goto -> bb6;
}

bb6: {
StorageDead(_4);
- drop(_1) -> [return: bb7, unwind continue];
+ goto -> bb23;
}

bb7: {
+ _9 = const false;
StorageDead(_1);
return;
}

bb8 (cleanup): {
- drop(_8) -> [return: bb10, unwind terminate(cleanup)];
+ goto -> bb10;
}

bb9 (cleanup): {
- drop(_6) -> [return: bb10, unwind terminate(cleanup)];
+ goto -> bb10;
}

bb10 (cleanup): {
- drop(_1) -> [return: bb13, unwind terminate(cleanup)];
+ goto -> bb29;
}

bb11 (cleanup): {
- drop(_3) -> [return: bb12, unwind terminate(cleanup)];
+ goto -> bb12;
}

bb12 (cleanup): {
- drop(_2) -> [return: bb13, unwind terminate(cleanup)];
+ goto -> bb13;
}

bb13 (cleanup): {
resume;
+ }
+
+ bb14: {
+ _9 = const false;
+ goto -> bb7;
+ }
+
+ bb15 (cleanup): {
+ drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
+ }
+
+ bb16 (cleanup): {
+ switchInt(copy _9) -> [0: bb13, otherwise: bb15];
+ }
+
+ bb17: {
+ drop((_1.1: DropAllocator)) -> [return: bb14, unwind: bb13];
+ }
+
+ bb18: {
+ switchInt(copy _9) -> [0: bb14, otherwise: bb17];
+ }
+
+ bb19: {
+ _10 = &mut _1;
+ _11 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _10) -> [return: bb18, unwind: bb16];
+ }
+
+ bb20 (cleanup): {
+ _12 = &mut _1;
+ _13 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _12) -> [return: bb16, unwind terminate(cleanup)];
+ }
+
+ bb21: {
+ goto -> bb19;
+ }
+
+ bb22: {
+ _14 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
+ goto -> bb21;
+ }
+
+ bb23: {
+ switchInt(copy _9) -> [0: bb18, otherwise: bb22];
+ }
+
+ bb24 (cleanup): {
+ drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
+ }
+
+ bb25 (cleanup): {
+ switchInt(copy _9) -> [0: bb13, otherwise: bb24];
+ }
+
+ bb26 (cleanup): {
+ _15 = &mut _1;
+ _16 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _15) -> [return: bb25, unwind terminate(cleanup)];
+ }
+
+ bb27 (cleanup): {
+ goto -> bb26;
+ }
+
+ bb28 (cleanup): {
+ _17 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
+ goto -> bb27;
+ }
+
+ bb29 (cleanup): {
+ switchInt(copy _9) -> [0: bb25, otherwise: bb28];
}
}

38 changes: 38 additions & 0 deletions tests/mir-opt/box_conditional_drop_allocator.rs
@@ -0,0 +1,38 @@
// skip-filecheck
//@ test-mir-pass: ElaborateDrops
#![feature(allocator_api)]

// Regression test for #131082.
// Tests that the allocator of a Box is dropped in conditional drops.

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

struct DropAllocator;

unsafe impl Allocator for DropAllocator {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
Global.allocate(layout)
}
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
Global.deallocate(ptr, layout);
}
}
impl Drop for DropAllocator {
fn drop(&mut self) {}
}

struct HasDrop;
impl Drop for HasDrop {
fn drop(&mut self) {}
}

// EMIT_MIR box_conditional_drop_allocator.main.ElaborateDrops.diff
fn main() {
let b = Box::new_in(HasDrop, DropAllocator);
if true {
drop(*b);
} else {
drop(b);
}
}
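A runtime-observable variant of the regression scenario above (not part of this PR, and again assuming `allocator_api`): it counts drops of the allocator and asserts that the allocator's destructor runs on both the `drop(*b)` and the `drop(b)` paths. The `run` helper and `ALLOC_DROPS` counter are illustrative names, and the final assertion reflects the behavior this fix is meant to guarantee.

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering};

// Counts how many times the allocator's destructor runs.
static ALLOC_DROPS: AtomicUsize = AtomicUsize::new(0);

struct DropAllocator;

unsafe impl Allocator for DropAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout);
    }
}

impl Drop for DropAllocator {
    fn drop(&mut self) {
        ALLOC_DROPS.fetch_add(1, Ordering::SeqCst);
    }
}

struct HasDrop;
impl Drop for HasDrop {
    fn drop(&mut self) {}
}

fn run(take_contents: bool) {
    let b = Box::new_in(HasDrop, DropAllocator);
    if take_contents {
        // Moves the payload out; the box still owns the allocation and the
        // allocator, which must be dropped when `b` goes out of scope.
        drop(*b);
    } else {
        drop(b);
    }
}

fn main() {
    run(true);
    run(false);
    // The allocator's destructor should run on both paths.
    assert_eq!(ALLOC_DROPS.load(Ordering::SeqCst), 2);
}
```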