Rewrite boxed_region/memory_region in Rust
This drops more of the old C++ runtime, rewriting it in Rust instead. A few features were lost along the way, but hopefully not too many. The main loss is that there are no longer backtraces associated with allocations (Rust doesn't have a way of acquiring those just yet). Other than that, though, I believe that the rest of the debugging utilities made their way over into Rust. Closes #8704
This commit is contained in:
parent
c5074ae646
commit
357ef1f69c
13 changed files with 257 additions and 656 deletions
|
|
@ -158,7 +158,6 @@ pub mod raw {
|
|||
use at_vec::capacity;
|
||||
use cast;
|
||||
use cast::{transmute, transmute_copy};
|
||||
use libc;
|
||||
use ptr;
|
||||
use mem;
|
||||
use uint;
|
||||
|
|
@ -250,7 +249,7 @@ pub mod raw {
|
|||
use rt::task::Task;
|
||||
|
||||
do Local::borrow |task: &mut Task| {
|
||||
task.heap.realloc(ptr as *libc::c_void, size) as *()
|
||||
task.heap.realloc(ptr as *mut Box<()>, size) as *()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@
|
|||
#[doc(hidden)];
|
||||
|
||||
use libc::c_void;
|
||||
use ptr::null;
|
||||
use ptr;
|
||||
use unstable::intrinsics::TyDesc;
|
||||
use unstable::raw;
|
||||
|
||||
|
|
@ -37,7 +37,7 @@ unsafe fn each_live_alloc(read_next_before: bool,
|
|||
use rt::local_heap;
|
||||
|
||||
let mut box = local_heap::live_allocs();
|
||||
while box != null() {
|
||||
while box != ptr::mut_null() {
|
||||
let next_before = (*box).next;
|
||||
let uniq = (*box).ref_count == managed::RC_MANAGED_UNIQUE;
|
||||
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ use os;
|
|||
|
||||
static mut MIN_STACK: uint = 4000000;
|
||||
static mut DEBUG_BORROW: bool = false;
|
||||
static mut POISON_ON_FREE: bool = false;
|
||||
|
||||
pub fn init() {
|
||||
unsafe {
|
||||
|
|
@ -33,6 +34,10 @@ pub fn init() {
|
|||
Some(_) => DEBUG_BORROW = true,
|
||||
None => ()
|
||||
}
|
||||
match os::getenv("RUST_POISON_ON_FREE") {
|
||||
Some(_) => POISON_ON_FREE = true,
|
||||
None => ()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -43,3 +48,7 @@ pub fn min_stack() -> uint {
|
|||
pub fn debug_borrow() -> bool {
|
||||
unsafe { DEBUG_BORROW }
|
||||
}
|
||||
|
||||
pub fn poison_on_free() -> bool {
|
||||
unsafe { POISON_ON_FREE }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ extern {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
fn get_box_size(body_size: uint, body_align: uint) -> uint {
|
||||
pub fn get_box_size(body_size: uint, body_align: uint) -> uint {
|
||||
let header_size = size_of::<raw::Box<()>>();
|
||||
// FIXME (#2699): This alignment calculation is suspicious. Is it right?
|
||||
let total_size = align_to(header_size, body_align) + body_size;
|
||||
|
|
|
|||
|
|
@ -10,80 +10,272 @@
|
|||
|
||||
//! The local, garbage collected heap
|
||||
|
||||
use cast;
|
||||
use libc::{c_void, uintptr_t};
|
||||
use libc;
|
||||
use libc::{c_void, uintptr_t, size_t};
|
||||
use mem;
|
||||
use ops::Drop;
|
||||
use option::{Option, None, Some};
|
||||
use ptr;
|
||||
use rt::env;
|
||||
use rt::global_heap;
|
||||
use rt::local::Local;
|
||||
use rt::task::Task;
|
||||
use unstable::intrinsics::TyDesc;
|
||||
use unstable::raw;
|
||||
|
||||
type MemoryRegion = c_void;
|
||||
// This has no meaning with out rtdebug also turned on.
|
||||
static TRACK_ALLOCATIONS: int = 0;
|
||||
static MAGIC: u32 = 0xbadc0ffe;
|
||||
|
||||
struct Env { priv opaque: () }
|
||||
pub type Box = raw::Box<()>;
|
||||
|
||||
struct BoxedRegion {
|
||||
env: *Env,
|
||||
backing_region: *MemoryRegion,
|
||||
live_allocs: *raw::Box<()>,
|
||||
pub struct MemoryRegion {
|
||||
priv allocations: ~[*AllocHeader],
|
||||
priv live_allocations: uint,
|
||||
}
|
||||
|
||||
pub type OpaqueBox = c_void;
|
||||
pub type TypeDesc = c_void;
|
||||
|
||||
pub struct LocalHeap {
|
||||
priv memory_region: *MemoryRegion,
|
||||
priv boxed_region: *BoxedRegion
|
||||
priv memory_region: MemoryRegion,
|
||||
|
||||
priv poison_on_free: bool,
|
||||
priv live_allocs: *mut raw::Box<()>,
|
||||
}
|
||||
|
||||
impl LocalHeap {
|
||||
#[fixed_stack_segment] #[inline(never)]
|
||||
pub fn new() -> LocalHeap {
|
||||
unsafe {
|
||||
// XXX: These usually come from the environment
|
||||
let detailed_leaks = false as uintptr_t;
|
||||
let poison_on_free = false as uintptr_t;
|
||||
let region = rust_new_memory_region(detailed_leaks, poison_on_free);
|
||||
assert!(region.is_not_null());
|
||||
let boxed = rust_new_boxed_region(region, poison_on_free);
|
||||
assert!(boxed.is_not_null());
|
||||
LocalHeap {
|
||||
memory_region: region,
|
||||
boxed_region: boxed
|
||||
let region = MemoryRegion {
|
||||
allocations: ~[],
|
||||
live_allocations: 0,
|
||||
};
|
||||
LocalHeap {
|
||||
memory_region: region,
|
||||
poison_on_free: env::poison_on_free(),
|
||||
live_allocs: ptr::mut_null(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn alloc(&mut self, td: *TyDesc, size: uint) -> *mut Box {
|
||||
let total_size = global_heap::get_box_size(size, unsafe { (*td).align });
|
||||
let box = self.memory_region.malloc(total_size);
|
||||
{
|
||||
// Make sure that we can't use `mybox` outside of this scope
|
||||
let mybox: &mut Box = unsafe { cast::transmute(box) };
|
||||
// Clear out this box, and move it to the front of the live
|
||||
// allocations list
|
||||
mybox.type_desc = td;
|
||||
mybox.ref_count = 1;
|
||||
mybox.prev = ptr::mut_null();
|
||||
mybox.next = self.live_allocs;
|
||||
if !self.live_allocs.is_null() {
|
||||
unsafe { (*self.live_allocs).prev = box; }
|
||||
}
|
||||
self.live_allocs = box;
|
||||
}
|
||||
return box;
|
||||
}
|
||||
|
||||
pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
|
||||
// Make sure that we can't use `mybox` outside of this scope
|
||||
let total_size = size + mem::size_of::<Box>();
|
||||
let new_box = self.memory_region.realloc(ptr, total_size);
|
||||
{
|
||||
// Fix links because we could have moved around
|
||||
let mybox: &mut Box = unsafe { cast::transmute(new_box) };
|
||||
if !mybox.prev.is_null() {
|
||||
unsafe { (*mybox.prev).next = new_box; }
|
||||
}
|
||||
if !mybox.next.is_null() {
|
||||
unsafe { (*mybox.next).prev = new_box; }
|
||||
}
|
||||
}
|
||||
if self.live_allocs == ptr {
|
||||
self.live_allocs = new_box;
|
||||
}
|
||||
return new_box;
|
||||
}
|
||||
|
||||
#[fixed_stack_segment] #[inline(never)]
|
||||
pub fn alloc(&mut self, td: *TypeDesc, size: uint) -> *OpaqueBox {
|
||||
unsafe {
|
||||
return rust_boxed_region_malloc(self.boxed_region, td, size as size_t);
|
||||
}
|
||||
}
|
||||
pub fn free(&mut self, box: *mut Box) {
|
||||
{
|
||||
// Make sure that we can't use `mybox` outside of this scope
|
||||
let mybox: &mut Box = unsafe { cast::transmute(box) };
|
||||
assert!(!mybox.type_desc.is_null());
|
||||
|
||||
#[fixed_stack_segment] #[inline(never)]
|
||||
pub fn realloc(&mut self, ptr: *OpaqueBox, size: uint) -> *OpaqueBox {
|
||||
unsafe {
|
||||
return rust_boxed_region_realloc(self.boxed_region, ptr, size as size_t);
|
||||
}
|
||||
}
|
||||
// Unlink it from the linked list
|
||||
if !mybox.prev.is_null() {
|
||||
unsafe { (*mybox.prev).next = mybox.next; }
|
||||
}
|
||||
if !mybox.next.is_null() {
|
||||
unsafe { (*mybox.next).prev = mybox.prev; }
|
||||
}
|
||||
if self.live_allocs == box {
|
||||
self.live_allocs = mybox.next;
|
||||
}
|
||||
|
||||
#[fixed_stack_segment] #[inline(never)]
|
||||
pub fn free(&mut self, box: *OpaqueBox) {
|
||||
unsafe {
|
||||
return rust_boxed_region_free(self.boxed_region, box);
|
||||
// Destroy the box memory-wise
|
||||
if self.poison_on_free {
|
||||
unsafe {
|
||||
let ptr: *mut u8 = cast::transmute(&mybox.data);
|
||||
ptr::set_memory(ptr, 0xab, (*mybox.type_desc).size);
|
||||
}
|
||||
}
|
||||
mybox.prev = ptr::mut_null();
|
||||
mybox.next = ptr::mut_null();
|
||||
mybox.type_desc = ptr::null();
|
||||
}
|
||||
|
||||
self.memory_region.free(box);
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for LocalHeap {
|
||||
#[fixed_stack_segment] #[inline(never)]
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
rust_delete_boxed_region(self.boxed_region);
|
||||
rust_delete_memory_region(self.memory_region);
|
||||
assert!(self.live_allocs.is_null());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(rtdebug)]
|
||||
struct AllocHeader {
|
||||
magic: u32,
|
||||
index: i32,
|
||||
size: u32,
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
struct AllocHeader;
|
||||
|
||||
impl AllocHeader {
|
||||
#[cfg(rtdebug)]
|
||||
fn init(&mut self, size: u32) {
|
||||
if TRACK_ALLOCATIONS > 0 {
|
||||
self.magic = MAGIC;
|
||||
self.index = -1;
|
||||
self.size = size;
|
||||
}
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
fn init(&mut self, _size: u32) {}
|
||||
|
||||
#[cfg(rtdebug)]
|
||||
fn assert_sane(&self) {
|
||||
if TRACK_ALLOCATIONS > 0 {
|
||||
rtassert!(self.magic == MAGIC);
|
||||
}
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
fn assert_sane(&self) {}
|
||||
|
||||
#[cfg(rtdebug)]
|
||||
fn update_size(&mut self, size: u32) {
|
||||
if TRACK_ALLOCATIONS > 0 {
|
||||
self.size = size;
|
||||
}
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
fn update_size(&mut self, _size: u32) {}
|
||||
|
||||
fn box(&mut self) -> *mut Box {
|
||||
let myaddr: uint = unsafe { cast::transmute(self) };
|
||||
(myaddr + AllocHeader::size()) as *mut Box
|
||||
}
|
||||
|
||||
fn size() -> uint {
|
||||
// For some platforms, 16 byte alignment is required.
|
||||
let ptr_size = 16;
|
||||
let header_size = mem::size_of::<AllocHeader>();
|
||||
return (header_size + ptr_size - 1) / ptr_size * ptr_size;
|
||||
}
|
||||
|
||||
fn from(box: *mut Box) -> *mut AllocHeader {
|
||||
(box as uint - AllocHeader::size()) as *mut AllocHeader
|
||||
}
|
||||
}
|
||||
|
||||
impl MemoryRegion {
|
||||
fn malloc(&mut self, size: uint) -> *mut Box {
|
||||
let total_size = size + AllocHeader::size();
|
||||
let alloc: *AllocHeader = unsafe {
|
||||
global_heap::malloc_raw(total_size) as *AllocHeader
|
||||
};
|
||||
|
||||
let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
|
||||
alloc.init(size as u32);
|
||||
self.claim(alloc);
|
||||
self.live_allocations += 1;
|
||||
|
||||
return alloc.box();
|
||||
}
|
||||
|
||||
fn realloc(&mut self, box: *mut Box, size: uint) -> *mut Box {
|
||||
rtassert!(!box.is_null());
|
||||
let orig_alloc = AllocHeader::from(box);
|
||||
unsafe { (*orig_alloc).assert_sane(); }
|
||||
|
||||
let total_size = size + AllocHeader::size();
|
||||
let alloc: *AllocHeader = unsafe {
|
||||
global_heap::realloc_raw(orig_alloc as *mut libc::c_void,
|
||||
total_size) as *AllocHeader
|
||||
};
|
||||
|
||||
let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
|
||||
alloc.assert_sane();
|
||||
alloc.update_size(size as u32);
|
||||
self.update(alloc, orig_alloc as *AllocHeader);
|
||||
return alloc.box();
|
||||
}
|
||||
|
||||
fn free(&mut self, box: *mut Box) {
|
||||
rtassert!(!box.is_null());
|
||||
let alloc = AllocHeader::from(box);
|
||||
unsafe {
|
||||
(*alloc).assert_sane();
|
||||
self.release(cast::transmute(alloc));
|
||||
rtassert!(self.live_allocations > 0);
|
||||
self.live_allocations -= 1;
|
||||
global_heap::exchange_free(alloc as *libc::c_char)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(rtdebug)]
|
||||
fn claim(&mut self, alloc: &mut AllocHeader) {
|
||||
alloc.assert_sane();
|
||||
if TRACK_ALLOCATIONS > 1 {
|
||||
alloc.index = self.allocations.len() as i32;
|
||||
self.allocations.push(&*alloc as *AllocHeader);
|
||||
}
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
fn claim(&mut self, _alloc: &mut AllocHeader) {}
|
||||
|
||||
#[cfg(rtdebug)]
|
||||
fn release(&mut self, alloc: &AllocHeader) {
|
||||
alloc.assert_sane();
|
||||
if TRACK_ALLOCATIONS > 1 {
|
||||
rtassert!(self.allocations[alloc.index] == alloc as *AllocHeader);
|
||||
self.allocations[alloc.index] = ptr::null();
|
||||
}
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
fn release(&mut self, _alloc: &AllocHeader) {}
|
||||
|
||||
#[cfg(rtdebug)]
|
||||
fn update(&mut self, alloc: &mut AllocHeader, orig: *AllocHeader) {
|
||||
alloc.assert_sane();
|
||||
if TRACK_ALLOCATIONS > 1 {
|
||||
rtassert!(self.allocations[alloc.index] == orig);
|
||||
self.allocations[alloc.index] = &*alloc as *AllocHeader;
|
||||
}
|
||||
}
|
||||
#[cfg(not(rtdebug))]
|
||||
fn update(&mut self, _alloc: &mut AllocHeader, _orig: *AllocHeader) {}
|
||||
}
|
||||
|
||||
impl Drop for MemoryRegion {
|
||||
fn drop(&mut self) {
|
||||
if self.live_allocations != 0 {
|
||||
rtabort!("leaked managed memory ({} objects)", self.live_allocations);
|
||||
}
|
||||
rtassert!(self.allocations.iter().all(|s| s.is_null()));
|
||||
}
|
||||
}
|
||||
|
||||
pub unsafe fn local_malloc(td: *libc::c_char, size: libc::uintptr_t) -> *libc::c_char {
|
||||
|
|
@ -91,7 +283,7 @@ pub unsafe fn local_malloc(td: *libc::c_char, size: libc::uintptr_t) -> *libc::c
|
|||
let task: Option<*mut Task> = Local::try_unsafe_borrow();
|
||||
match task {
|
||||
Some(task) => {
|
||||
(*task).heap.alloc(td as *libc::c_void, size as uint) as *libc::c_char
|
||||
(*task).heap.alloc(td as *TyDesc, size as uint) as *libc::c_char
|
||||
}
|
||||
None => rtabort!("local malloc outside of task")
|
||||
}
|
||||
|
|
@ -103,34 +295,16 @@ pub unsafe fn local_free(ptr: *libc::c_char) {
|
|||
let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();
|
||||
match task_ptr {
|
||||
Some(task) => {
|
||||
(*task).heap.free(ptr as *libc::c_void);
|
||||
(*task).heap.free(ptr as *mut Box)
|
||||
}
|
||||
None => rtabort!("local free outside of task")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn live_allocs() -> *raw::Box<()> {
|
||||
let region = do Local::borrow |task: &mut Task| {
|
||||
task.heap.boxed_region
|
||||
};
|
||||
|
||||
return unsafe { (*region).live_allocs };
|
||||
}
|
||||
|
||||
extern {
|
||||
fn rust_new_memory_region(detailed_leaks: uintptr_t,
|
||||
poison_on_free: uintptr_t) -> *MemoryRegion;
|
||||
fn rust_delete_memory_region(region: *MemoryRegion);
|
||||
fn rust_new_boxed_region(region: *MemoryRegion,
|
||||
poison_on_free: uintptr_t) -> *BoxedRegion;
|
||||
fn rust_delete_boxed_region(region: *BoxedRegion);
|
||||
fn rust_boxed_region_malloc(region: *BoxedRegion,
|
||||
td: *TypeDesc,
|
||||
size: size_t) -> *OpaqueBox;
|
||||
fn rust_boxed_region_realloc(region: *BoxedRegion,
|
||||
ptr: *OpaqueBox,
|
||||
size: size_t) -> *OpaqueBox;
|
||||
fn rust_boxed_region_free(region: *BoxedRegion, box: *OpaqueBox);
|
||||
pub fn live_allocs() -> *mut Box {
|
||||
do Local::borrow |task: &mut Task| {
|
||||
task.heap.live_allocs
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
|||
|
|
@ -15,8 +15,8 @@ use unstable::intrinsics::TyDesc;
|
|||
pub struct Box<T> {
|
||||
ref_count: uint,
|
||||
type_desc: *TyDesc,
|
||||
priv prev: *Box<T>,
|
||||
next: *Box<T>,
|
||||
prev: *mut Box<T>,
|
||||
next: *mut Box<T>,
|
||||
data: T
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue