From c08ddaaa48074040eeecf75e243cb5cbc19f4eaf Mon Sep 17 00:00:00 2001
From: Scott Olson <scott@solson.me>
Date: Wed, 6 Apr 2016 04:08:52 -0600
Subject: [PATCH] Implement a naive, slow version of undef mask copying.

---
 src/memory.rs | 20 ++++++++++++++++++--
 test/sums.rs  |  5 +++++
 2 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/src/memory.rs b/src/memory.rs
index 113930f986d2..c8f19fa165fa 100644
--- a/src/memory.rs
+++ b/src/memory.rs
@@ -225,8 +225,10 @@ impl Memory {
             }
         }
 
-        // TODO(tsion): Copy undef ranges from src to dest.
-        self.copy_relocations(src, dest, size)
+        try!(self.copy_undef_mask(src, dest, size));
+        try!(self.copy_relocations(src, dest, size));
+
+        Ok(())
     }
 
     pub fn write_bytes(&mut self, ptr: Pointer, src: &[u8]) -> EvalResult<()> {
@@ -379,6 +381,20 @@ impl Memory {
     // Undefined bytes
     ////////////////////////////////////////////////////////////////////////////////
 
+    // FIXME(tsion): This is a very naive, slow version.
+    fn copy_undef_mask(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
+        // The bits have to be saved locally before writing to dest in case src and dest overlap.
+        let mut v = Vec::with_capacity(size);
+        for i in 0..size {
+            let defined = try!(self.get(src.alloc_id)).undef_mask.get(src.offset + i);
+            v.push(defined);
+        }
+        for (i, defined) in v.into_iter().enumerate() {
+            try!(self.get_mut(dest.alloc_id)).undef_mask.set(dest.offset + i, defined);
+        }
+        Ok(())
+    }
+
     fn check_defined(&self, ptr: Pointer, size: usize) -> EvalResult<()> {
         let alloc = try!(self.get(ptr.alloc_id));
         if !alloc.undef_mask.is_range_defined(ptr.offset, ptr.offset + size) {
diff --git a/test/sums.rs b/test/sums.rs
index c63d3e680450..67257050364c 100644
--- a/test/sums.rs
+++ b/test/sums.rs
@@ -47,3 +47,8 @@ fn match_opt_some() -> i8 {
         None => 20,
     }
 }
+
+#[miri_run]
+fn two_nones() -> (Option<i8>, Option<i8>) {
+    (None, None)
+}