From 57ce47b72814f8147d2cddb87628b17aa1084e74 Mon Sep 17 00:00:00 2001 From: carbotaniuman <41451839+carbotaniuman@users.noreply.github.com> Date: Wed, 15 Jun 2022 14:55:21 -0500 Subject: [PATCH 1/8] Handle wildcard pointers in SB --- src/machine.rs | 26 +- src/stacked_borrows.rs | 244 +++++++++++++----- src/stacked_borrows/diagnostics.rs | 22 +- .../permissive_provenance_transmute.rs | 2 +- tests/fail/provenance/ptr_int_unexposed.rs | 2 +- tests/pass/ptr_int_from_exposed.rs | 2 +- tests/pass/stacked-borrows/int-to-ptr.rs | 72 +++++- 7 files changed, 282 insertions(+), 88 deletions(-) diff --git a/src/machine.rs b/src/machine.rs index d14ddaa1a6bb..c2a7a34a9cc0 100644 --- a/src/machine.rs +++ b/src/machine.rs @@ -489,7 +489,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { type AllocExtra = AllocExtra; type PointerTag = Tag; - type TagExtra = SbTag; + type TagExtra = Option; type MemoryMap = MonoHashMap, Allocation)>; @@ -708,8 +708,24 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { ptr: Pointer, ) -> InterpResult<'tcx> { match ptr.provenance { - Tag::Concrete(concrete) => - intptrcast::GlobalStateInner::expose_addr(ecx, concrete.alloc_id), + Tag::Concrete(ConcreteTag { alloc_id, sb }) => { + intptrcast::GlobalStateInner::expose_addr(ecx, alloc_id); + + let (size, _) = + ecx.get_alloc_size_and_align(alloc_id, AllocCheck::MaybeDead).unwrap(); + + // Function pointers and dead objects don't have an alloc_extra so we ignore them. + if let Ok(alloc_extra) = ecx.get_alloc_extra(alloc_id) { + if let Some(stacked_borrows) = &alloc_extra.stacked_borrows { + stacked_borrows.ptr_exposed( + alloc_id, + sb, + alloc_range(Size::from_bytes(0), size), + ecx.machine.stacked_borrows.as_ref().unwrap(), + )?; + } + } + } Tag::Wildcard => { // No need to do anything for wildcard pointers as // their provenances have already been previously exposed. 
@@ -728,8 +744,8 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { rel.map(|(alloc_id, size)| { let sb = match ptr.provenance { - Tag::Concrete(ConcreteTag { sb, .. }) => sb, - Tag::Wildcard => SbTag::Untagged, + Tag::Concrete(ConcreteTag { sb, .. }) => Some(sb), + Tag::Wildcard => None, }; (alloc_id, size, sb) }) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 0c537e0d7a5c..ad7569008e20 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -104,6 +104,10 @@ pub struct Stack { /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`. /// * Except for `Untagged`, no tag occurs in the stack more than once. borrows: Vec, + /// If this is `Some(id)`, then the actual current stack is unknown. What we do know + /// is that `borrows` are at the top of the stack, and below it are arbitrarily many items + /// whose `tag` is either `Untagged` or strictly less than `id`. + unknown_bottom: Option, } /// Extra per-allocation state. @@ -113,6 +117,8 @@ pub struct Stacks { stacks: RefCell>, /// Stores past operations on this allocation history: RefCell, + /// The set of tags that have been exposed + exposed_tags: RefCell>, } /// Extra global state, available to the memory access hooks. @@ -283,32 +289,72 @@ impl Permission { impl<'tcx> Stack { /// Find the item granting the given kind of access to the given tag, and return where /// it is on the stack. - fn find_granting(&self, access: AccessKind, tag: SbTag) -> Option { - self.borrows + // TODO: Doc ok with Some(index) or None if unknown_bottom used + // Err if does not match + fn find_granting( + &self, + access: AccessKind, + tag: Option, + exposed_tags: &FxHashSet, + ) -> Result, ()> { + let res = self + .borrows .iter() .enumerate() // we also need to know *where* in the stack .rev() // search top-to-bottom // Return permission of first item that grants access. // We require a permission with the right tag, ensuring U3 and F3. 
- .find_map( - |(idx, item)| { - if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None } + .find_map(|(idx, item)| { + match tag { + Some(tag) if tag == item.tag && item.perm.grants(access) => Some(idx), + None if exposed_tags.contains(&item.tag) => Some(idx), + _ => None, + } + }); + + if res.is_some() { + return Ok(res); + } + + match self.unknown_bottom { + Some(id) => + match tag { + Some(tag) => + match tag { + SbTag::Tagged(tag_id) if tag_id < id => Ok(None), + SbTag::Untagged => Ok(None), + _ => Err(()), + }, + None => Ok(None), }, - ) + None => Err(()), + } } /// Find the first write-incompatible item above the given one -- /// i.e, find the height to which the stack will be truncated when writing to `granting`. - fn find_first_write_incompatible(&self, granting: usize) -> usize { - let perm = self.borrows[granting].perm; + fn find_first_write_incompatible(&self, granting: Option) -> usize { + let perm = if let Some(idx) = granting { + self.borrows[idx].perm + } else { + // I assume this has to be it? + Permission::SharedReadWrite + }; + match perm { Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"), Permission::Disabled => bug!("Cannot use Disabled for anything"), // On a write, everything above us is incompatible. - Permission::Unique => granting + 1, + Permission::Unique => + if let Some(idx) = granting { + idx + 1 + } else { + 0 + }, Permission::SharedReadWrite => { // The SharedReadWrite *just* above us are compatible, to skip those. - let mut idx = granting + 1; + let mut idx = if let Some(idx) = granting { idx + 1 } else { 0 }; + while let Some(item) = self.borrows.get(idx) { if item.perm == Permission::SharedReadWrite { // Go on. 
@@ -380,58 +426,67 @@ impl<'tcx> Stack { fn access( &mut self, access: AccessKind, - tag: SbTag, + tag: Option, (alloc_id, alloc_range, offset): (AllocId, AllocRange, Size), // just for debug printing and error messages global: &mut GlobalStateInner, current_span: &mut CurrentSpan<'_, '_, 'tcx>, alloc_history: &mut AllocHistory, + exposed_tags: &FxHashSet, ) -> InterpResult<'tcx> { // Two main steps: Find granting item, remove incompatible items above. // Step 1: Find granting item. - let granting_idx = self.find_granting(access, tag).ok_or_else(|| { + let granting_idx = self.find_granting(access, tag, exposed_tags).map_err(|_| { alloc_history.access_error(access, tag, alloc_id, alloc_range, offset, self) })?; // Step 2: Remove incompatible items above them. Make sure we do not remove protected // items. Behavior differs for reads and writes. - if access == AccessKind::Write { - // Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique - // pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses). - let first_incompatible_idx = self.find_first_write_incompatible(granting_idx); - for item in self.borrows.drain(first_incompatible_idx..).rev() { - trace!("access: popping item {:?}", item); - Stack::check_protector( - &item, - Some((tag, alloc_range, offset, access)), - global, - alloc_history, - )?; - alloc_history.log_invalidation(item.tag, alloc_range, current_span); - } - } else { - // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses. - // The reason this is not following the stack discipline (by removing the first Unique and - // everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement - // would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the - // `SharedReadWrite` for `raw`. 
- // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared - // reference and use that. - // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs. - for idx in ((granting_idx + 1)..self.borrows.len()).rev() { - let item = &mut self.borrows[idx]; - if item.perm == Permission::Unique { - trace!("access: disabling item {:?}", item); + if let Some(tag) = tag { + if access == AccessKind::Write { + // Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique + // pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses). + let first_incompatible_idx = self.find_first_write_incompatible(granting_idx); + for item in self.borrows.drain(first_incompatible_idx..).rev() { + trace!("access: popping item {:?}", item); Stack::check_protector( - item, + &item, Some((tag, alloc_range, offset, access)), global, alloc_history, )?; - item.perm = Permission::Disabled; alloc_history.log_invalidation(item.tag, alloc_range, current_span); } + } else { + let start_idx = if let Some(idx) = granting_idx { idx + 1 } else { 0 }; + // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses. + // The reason this is not following the stack discipline (by removing the first Unique and + // everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement + // would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the + // `SharedReadWrite` for `raw`. + // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared + // reference and use that. + // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs. 
+ for idx in (start_idx..self.borrows.len()).rev() { + let item = &mut self.borrows[idx]; + + if item.perm == Permission::Unique { + trace!("access: disabling item {:?}", item); + Stack::check_protector( + item, + Some((tag, alloc_range, offset, access)), + global, + alloc_history, + )?; + item.perm = Permission::Disabled; + alloc_history.log_invalidation(item.tag, alloc_range, current_span); + } + } } + } else { + self.borrows.clear(); + // TODO + // self.borrows.push(ItemOrUnknown::Unknown(global.next_ptr_id)); } // Done. @@ -442,19 +497,20 @@ impl<'tcx> Stack { /// active protectors at all because we will remove all items. fn dealloc( &mut self, - tag: SbTag, + tag: Option, (alloc_id, alloc_range, offset): (AllocId, AllocRange, Size), // just for debug printing and error messages global: &GlobalStateInner, alloc_history: &mut AllocHistory, + exposed_tags: &FxHashSet, ) -> InterpResult<'tcx> { // Step 1: Find granting item. - self.find_granting(AccessKind::Write, tag).ok_or_else(|| { + self.find_granting(AccessKind::Write, tag, exposed_tags).map_err(|_| { err_sb_ub(format!( "no item granting write access for deallocation to tag {:?} at {:?} found in borrow stack", tag, alloc_id, ), None, - alloc_history.get_logs_relevant_to(tag, alloc_range, offset, None), + tag.and_then(|tag| alloc_history.get_logs_relevant_to(tag, alloc_range, offset, None)), ) })?; @@ -462,7 +518,6 @@ impl<'tcx> Stack { for item in self.borrows.drain(..).rev() { Stack::check_protector(&item, None, global, alloc_history)?; } - Ok(()) } @@ -474,21 +529,17 @@ impl<'tcx> Stack { /// `range` that we are currently checking. 
fn grant( &mut self, - derived_from: SbTag, + derived_from: Option, new: Item, (alloc_id, alloc_range, offset): (AllocId, AllocRange, Size), // just for debug printing and error messages global: &mut GlobalStateInner, current_span: &mut CurrentSpan<'_, '_, 'tcx>, alloc_history: &mut AllocHistory, + exposed_tags: &FxHashSet, ) -> InterpResult<'tcx> { // Figure out which access `perm` corresponds to. let access = if new.perm.grants(AccessKind::Write) { AccessKind::Write } else { AccessKind::Read }; - // Now we figure out which item grants our parent (`derived_from`) this kind of access. - // We use that to determine where to put the new item. - let granting_idx = self.find_granting(access, derived_from).ok_or_else(|| { - alloc_history.grant_error(derived_from, new, alloc_id, alloc_range, offset, self) - })?; // Compute where to put the new item. // Either way, we ensure that we insert the new item in a way such that between @@ -498,6 +549,21 @@ impl<'tcx> Stack { access == AccessKind::Write, "this case only makes sense for stack-like accesses" ); + + // Now we figure out which item grants our parent (`derived_from`) this kind of access. + // We use that to determine where to put the new item. + let granting_idx = + self.find_granting(access, derived_from, exposed_tags).map_err(|_| { + alloc_history.grant_error( + derived_from, + new, + alloc_id, + alloc_range, + offset, + self, + ) + })?; + // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write // access. Instead of popping the stack, we insert the item at the place the stack would // be popped to (i.e., we insert it above all the write-compatible items). @@ -514,6 +580,7 @@ impl<'tcx> Stack { global, current_span, alloc_history, + exposed_tags, )?; // We insert "as far up as possible": We know only compatible items are remaining @@ -522,7 +589,6 @@ impl<'tcx> Stack { // This ensures U1 and F1. self.borrows.len() }; - // Put the new item there. 
As an optimization, deduplicate if it is equal to one of its new neighbors. if self.borrows[new_idx - 1] == new || self.borrows.get(new_idx) == Some(&new) { // Optimization applies, done. @@ -531,7 +597,6 @@ impl<'tcx> Stack { trace!("reborrow: adding item {:?}", new); self.borrows.insert(new_idx, new); } - Ok(()) } } @@ -542,11 +607,12 @@ impl<'tcx> Stacks { /// Creates new stack with initial tag. fn new(size: Size, perm: Permission, tag: SbTag) -> Self { let item = Item { perm, tag, protector: None }; - let stack = Stack { borrows: vec![item] }; + let stack = Stack { borrows: vec![item], unknown_bottom: None }; Stacks { stacks: RefCell::new(RangeMap::new(size, stack)), history: RefCell::new(AllocHistory::new()), + exposed_tags: RefCell::new(FxHashSet::default()), } } @@ -554,12 +620,18 @@ impl<'tcx> Stacks { fn for_each( &self, range: AllocRange, - mut f: impl FnMut(Size, &mut Stack, &mut AllocHistory) -> InterpResult<'tcx>, + mut f: impl FnMut( + Size, + &mut Stack, + &mut AllocHistory, + &mut FxHashSet, + ) -> InterpResult<'tcx>, ) -> InterpResult<'tcx> { let mut stacks = self.stacks.borrow_mut(); let history = &mut *self.history.borrow_mut(); + let exposed_tags = &mut *self.exposed_tags.borrow_mut(); for (offset, stack) in stacks.iter_mut(range.start, range.size) { - f(offset, stack, history)?; + f(offset, stack, history, exposed_tags)?; } Ok(()) } @@ -568,12 +640,18 @@ impl<'tcx> Stacks { fn for_each_mut( &mut self, range: AllocRange, - mut f: impl FnMut(Size, &mut Stack, &mut AllocHistory) -> InterpResult<'tcx>, + mut f: impl FnMut( + Size, + &mut Stack, + &mut AllocHistory, + &mut FxHashSet, + ) -> InterpResult<'tcx>, ) -> InterpResult<'tcx> { let stacks = self.stacks.get_mut(); let history = &mut *self.history.borrow_mut(); + let exposed_tags = self.exposed_tags.get_mut(); for (offset, stack) in stacks.iter_mut(range.start, range.size) { - f(offset, stack, history)?; + f(offset, stack, history, exposed_tags)?; } Ok(()) } @@ -631,11 +709,31 @@ impl Stacks { 
} #[inline(always)] - pub fn memory_read<'tcx>( + pub fn ptr_exposed<'tcx>( &self, alloc_id: AllocId, tag: SbTag, range: AllocRange, + _state: &GlobalState, + ) -> InterpResult<'tcx> { + trace!( + "allocation exposed with tag {:?}: {:?}, size {}", + tag, + Pointer::new(alloc_id, range.start), + range.size.bytes() + ); + + self.exposed_tags.borrow_mut().insert(tag); + + Ok(()) + } + + #[inline(always)] + pub fn memory_read<'tcx>( + &self, + alloc_id: AllocId, + tag: Option, + range: AllocRange, state: &GlobalState, mut current_span: CurrentSpan<'_, '_, 'tcx>, ) -> InterpResult<'tcx> { @@ -646,7 +744,7 @@ impl Stacks { range.size.bytes() ); let mut state = state.borrow_mut(); - self.for_each(range, |offset, stack, history| { + self.for_each(range, |offset, stack, history, exposed_tags| { stack.access( AccessKind::Read, tag, @@ -654,6 +752,7 @@ impl Stacks { &mut state, &mut current_span, history, + exposed_tags, ) }) } @@ -662,7 +761,7 @@ impl Stacks { pub fn memory_written<'tcx>( &mut self, alloc_id: AllocId, - tag: SbTag, + tag: Option, range: AllocRange, state: &GlobalState, mut current_span: CurrentSpan<'_, '_, 'tcx>, @@ -674,7 +773,7 @@ impl Stacks { range.size.bytes() ); let mut state = state.borrow_mut(); - self.for_each_mut(range, |offset, stack, history| { + self.for_each_mut(range, |offset, stack, history, exposed_tags| { stack.access( AccessKind::Write, tag, @@ -682,6 +781,7 @@ impl Stacks { &mut state, &mut current_span, history, + exposed_tags, ) }) } @@ -690,14 +790,14 @@ impl Stacks { pub fn memory_deallocated<'tcx>( &mut self, alloc_id: AllocId, - tag: SbTag, + tag: Option, range: AllocRange, state: &GlobalState, ) -> InterpResult<'tcx> { trace!("deallocation with tag {:?}: {:?}, size {}", tag, alloc_id, range.size.bytes()); let state = state.borrow(); - self.for_each_mut(range, |offset, stack, history| { - stack.dealloc(tag, (alloc_id, range, offset), &state, history) + self.for_each_mut(range, |offset, stack, history, exposed_tags| { + 
stack.dealloc(tag, (alloc_id, range, offset), &state, history, exposed_tags) })?; Ok(()) } @@ -749,7 +849,9 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx // Dangling slices are a common case here; it's valid to get their length but with raw // pointer tagging for example all calls to get_unchecked on them are invalid. if let Ok((alloc_id, base_offset, orig_tag)) = this.ptr_try_get_alloc_id(place.ptr) { - log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; + if let Some(orig_tag) = orig_tag { + log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; + } } trace!( @@ -762,7 +864,9 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx return Ok(()); } let (alloc_id, base_offset, orig_tag) = this.ptr_get_alloc_id(place.ptr)?; - log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; + if let Some(orig_tag) = orig_tag { + log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; + } // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050). 
let (alloc_size, _) = @@ -830,7 +934,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx }; let item = Item { perm, tag: new_tag, protector }; let mut global = this.machine.stacked_borrows.as_ref().unwrap().borrow_mut(); - stacked_borrows.for_each(range, |offset, stack, history| { + stacked_borrows.for_each(range, |offset, stack, history, exposed_tags| { stack.grant( orig_tag, item, @@ -838,6 +942,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx &mut *global, current_span, history, + exposed_tags, ) }) })?; @@ -854,7 +959,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx let range = alloc_range(base_offset, size); let mut global = machine.stacked_borrows.as_ref().unwrap().borrow_mut(); let current_span = &mut machine.current_span(); // `get_alloc_extra_mut` invalidated our old `current_span` - stacked_borrows.for_each_mut(range, |offset, stack, history| { + stacked_borrows.for_each_mut(range, |offset, stack, history, exposed_tags| { stack.grant( orig_tag, item, @@ -862,6 +967,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx &mut global, current_span, history, + exposed_tags, ) })?; diff --git a/src/stacked_borrows/diagnostics.rs b/src/stacked_borrows/diagnostics.rs index 5400e9abe503..224954c76159 100644 --- a/src/stacked_borrows/diagnostics.rs +++ b/src/stacked_borrows/diagnostics.rs @@ -197,7 +197,7 @@ impl AllocHistory { /// Report a descriptive error when `new` could not be granted from `derived_from`. 
pub fn grant_error<'tcx>( &self, - derived_from: SbTag, + derived_from: Option, new: Item, alloc_id: AllocId, alloc_range: AllocRange, @@ -214,7 +214,9 @@ impl AllocHistory { err_sb_ub( format!("{}{}", action, error_cause(stack, derived_from)), Some(operation_summary("a reborrow", alloc_id, alloc_range)), - self.get_logs_relevant_to(derived_from, alloc_range, error_offset, None), + derived_from.and_then(|derived_from| { + self.get_logs_relevant_to(derived_from, alloc_range, error_offset, None) + }), ) } @@ -222,7 +224,7 @@ impl AllocHistory { pub fn access_error<'tcx>( &self, access: AccessKind, - tag: SbTag, + tag: Option, alloc_id: AllocId, alloc_range: AllocRange, error_offset: Size, @@ -238,7 +240,7 @@ impl AllocHistory { err_sb_ub( format!("{}{}", action, error_cause(stack, tag)), Some(operation_summary("an access", alloc_id, alloc_range)), - self.get_logs_relevant_to(tag, alloc_range, error_offset, None), + tag.and_then(|tag| self.get_logs_relevant_to(tag, alloc_range, error_offset, None)), ) } } @@ -251,10 +253,14 @@ fn operation_summary( format!("this error occurs as part of {} at {:?}{}", operation, alloc_id, HexRange(alloc_range)) } -fn error_cause(stack: &Stack, tag: SbTag) -> &'static str { - if stack.borrows.iter().any(|item| item.tag == tag && item.perm != Permission::Disabled) { - ", but that tag only grants SharedReadOnly permission for this location" +fn error_cause(stack: &Stack, tag: Option) -> &'static str { + if let Some(tag) = tag { + if stack.borrows.iter().any(|item| item.tag == tag && item.perm != Permission::Disabled) { + ", but that tag only grants SharedReadOnly permission for this location" + } else { + ", but that tag does not exist in the borrow stack for this location" + } } else { - ", but that tag does not exist in the borrow stack for this location" + ", but no exposed tags are valid in the borrow stack for this location" } } diff --git a/tests/fail/provenance/permissive_provenance_transmute.rs 
b/tests/fail/provenance/permissive_provenance_transmute.rs index dbfc5732ed3b..28e6ba623080 100644 --- a/tests/fail/provenance/permissive_provenance_transmute.rs +++ b/tests/fail/provenance/permissive_provenance_transmute.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zmiri-permissive-provenance -Zmiri-disable-stacked-borrows +// compile-flags: -Zmiri-permissive-provenance #![feature(strict_provenance)] use std::mem; diff --git a/tests/fail/provenance/ptr_int_unexposed.rs b/tests/fail/provenance/ptr_int_unexposed.rs index 310024c1efc7..ad29d38dc3f7 100644 --- a/tests/fail/provenance/ptr_int_unexposed.rs +++ b/tests/fail/provenance/ptr_int_unexposed.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zmiri-permissive-provenance -Zmiri-disable-stacked-borrows +// compile-flags: -Zmiri-permissive-provenance #![feature(strict_provenance)] fn main() { diff --git a/tests/pass/ptr_int_from_exposed.rs b/tests/pass/ptr_int_from_exposed.rs index e025cf921313..dc9cb393b781 100644 --- a/tests/pass/ptr_int_from_exposed.rs +++ b/tests/pass/ptr_int_from_exposed.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zmiri-permissive-provenance -Zmiri-disable-stacked-borrows +// compile-flags: -Zmiri-permissive-provenance #![feature(strict_provenance)] use std::ptr; diff --git a/tests/pass/stacked-borrows/int-to-ptr.rs b/tests/pass/stacked-borrows/int-to-ptr.rs index efba0da1b935..dc581d8af618 100644 --- a/tests/pass/stacked-borrows/int-to-ptr.rs +++ b/tests/pass/stacked-borrows/int-to-ptr.rs @@ -1,6 +1,6 @@ -fn main() { - ref_raw_int_raw(); -} +// compile-flags: -Zmiri-permissive-provenance +#![feature(strict_provenance)] +use std::ptr; // Just to make sure that casting a ref to raw, to int and back to raw // and only then using it works. This rules out ideas like "do escape-to-raw lazily"; @@ -11,3 +11,69 @@ fn ref_raw_int_raw() { let xraw = xref as *mut i32 as usize as *mut i32; assert_eq!(unsafe { *xraw }, 3); } + +/// Ensure that we do not just pick the topmost possible item on int2ptr casts. 
+fn example(variant: bool) { unsafe { + fn not_so_innocent(x: &mut u32) -> usize { + let x_raw4 = x as *mut u32; + x_raw4.expose_addr() + } + + let mut c = 42u32; + + let x_unique1 = &mut c; + // [..., Unique(1)] + + let x_raw2 = x_unique1 as *mut u32; + let x_raw2_addr = x_raw2.expose_addr(); + // [..., Unique(1), SharedRW(2)] + + let x_unique3 = &mut *x_raw2; + // [.., Unique(1), SharedRW(2), Unique(3)] + + assert_eq!(not_so_innocent(x_unique3), x_raw2_addr); + // [.., Unique(1), SharedRW(2), Unique(3), ..., SharedRW(4)] + + // Do an int2ptr cast. This can pick tag 2 or 4 (the two previously exposed tags). + // 4 is the "obvious" choice (topmost tag, what we used to do with untagged pointers). + // And indeed if `variant == true` it is the only possible choice. + // But if `variant == false` then 2 is the only possible choice! + let x_wildcard = ptr::from_exposed_addr_mut::(x_raw2_addr); + + if variant { + // If we picked 2, this will invalidate 3. + *x_wildcard = 10; + // Now we use 3. Only possible if above we picked 4. + *x_unique3 = 12; + } else { + // This invalidates tag 4. + *x_unique3 = 10; + // Now try to write with the "guessed" tag; it must be 2. + *x_wildcard = 12; + } +} } + +fn test() { unsafe { + let root = &mut 42; + let root_raw = root as *mut i32; + let addr1 = root_raw as usize; + let child = &mut *root_raw; + let child_raw = child as *mut i32; + let addr2 = child_raw as usize; + assert_eq!(addr1, addr2); + // First use child. + *(addr2 as *mut i32) -= 2; // picks child_raw + *child -= 2; + // Then use root. + *(addr1 as *mut i32) += 2; // picks root_raw + *root += 2; + // Value should be unchanged. 
+ assert_eq!(*root, 42); +} } + +fn main() { + ref_raw_int_raw(); + example(false); + example(true); + test(); +} From c7feb014b0b73d2e06ccfd3b754171d3fec7eeda Mon Sep 17 00:00:00 2001 From: carbotaniuman <41451839+carbotaniuman@users.noreply.github.com> Date: Wed, 15 Jun 2022 20:53:44 -0500 Subject: [PATCH 2/8] Maybe this wil work --- src/stacked_borrows.rs | 49 ++++++++++++++++-------------- src/stacked_borrows/diagnostics.rs | 19 ++++++++++-- 2 files changed, 44 insertions(+), 24 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index ad7569008e20..d28c6425cd42 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -485,8 +485,7 @@ impl<'tcx> Stack { } } else { self.borrows.clear(); - // TODO - // self.borrows.push(ItemOrUnknown::Unknown(global.next_ptr_id)); + self.unknown_bottom = Some(global.next_ptr_id); } // Done. @@ -541,6 +540,20 @@ impl<'tcx> Stack { let access = if new.perm.grants(AccessKind::Write) { AccessKind::Write } else { AccessKind::Read }; + // Now we figure out which item grants our parent (`derived_from`) this kind of access. + // We use that to determine where to put the new item. + let granting_idx = + self.find_granting(access, derived_from, exposed_tags).map_err(|_| { + alloc_history.grant_error( + derived_from, + new, + alloc_id, + alloc_range, + offset, + self, + ) + })?; + // Compute where to put the new item. // Either way, we ensure that we insert the new item in a way such that between // `derived_from` and the new one, there are only items *compatible with* `derived_from`. @@ -550,25 +563,17 @@ impl<'tcx> Stack { "this case only makes sense for stack-like accesses" ); - // Now we figure out which item grants our parent (`derived_from`) this kind of access. - // We use that to determine where to put the new item. 
- let granting_idx = - self.find_granting(access, derived_from, exposed_tags).map_err(|_| { - alloc_history.grant_error( - derived_from, - new, - alloc_id, - alloc_range, - offset, - self, - ) - })?; - - // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write - // access. Instead of popping the stack, we insert the item at the place the stack would - // be popped to (i.e., we insert it above all the write-compatible items). - // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`. - self.find_first_write_incompatible(granting_idx) + if derived_from.is_some() { + // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write + // access. Instead of popping the stack, we insert the item at the place the stack would + // be popped to (i.e., we insert it above all the write-compatible items). + // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`. + self.find_first_write_incompatible(granting_idx) + } else { + // TODO: is this correct + self.borrows.clear(); + 0 + } } else { // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. // Here, creating a reference actually counts as an access. @@ -590,7 +595,7 @@ impl<'tcx> Stack { self.borrows.len() }; // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors. - if self.borrows[new_idx - 1] == new || self.borrows.get(new_idx) == Some(&new) { + if self.borrows.get(new_idx) == Some(&new) || new_idx > 0 && self.borrows.get(new_idx - 1) == Some(&new) { // Optimization applies, done. 
trace!("reborrow: avoiding adding redundant item {:?}", new); } else { diff --git a/src/stacked_borrows/diagnostics.rs b/src/stacked_borrows/diagnostics.rs index 224954c76159..6a22d9a74392 100644 --- a/src/stacked_borrows/diagnostics.rs +++ b/src/stacked_borrows/diagnostics.rs @@ -4,6 +4,8 @@ use rustc_middle::mir::interpret::{AllocId, AllocRange}; use rustc_span::{Span, SpanData}; use rustc_target::abi::Size; +use core::fmt::Debug; + use crate::helpers::{CurrentSpan, HexRange}; use crate::stacked_borrows::{err_sb_ub, AccessKind, Permission}; use crate::Item; @@ -204,9 +206,16 @@ impl AllocHistory { error_offset: Size, stack: &Stack, ) -> InterpError<'tcx> { + // TODO: Fix this properly + let z = &derived_from; + let f = if let Some(ref t) = z { + t as &dyn Debug + } else { + &"" as &dyn Debug + }; let action = format!( "trying to reborrow {:?} for {:?} permission at {}[{:#x}]", - derived_from, + f, new.perm, alloc_id, error_offset.bytes(), @@ -230,10 +239,16 @@ impl AllocHistory { error_offset: Size, stack: &Stack, ) -> InterpError<'tcx> { + let z = &tag; + let f = if let Some(ref t) = z { + t as &dyn Debug + } else { + &"" as &dyn Debug + }; let action = format!( "attempting a {} using {:?} at {}[{:#x}]", access, - tag, + f, alloc_id, error_offset.bytes(), ); From d1e7de117c84ccfe611a14791dbb53c5e1520b50 Mon Sep 17 00:00:00 2001 From: carbotaniuman <41451839+carbotaniuman@users.noreply.github.com> Date: Wed, 15 Jun 2022 20:55:54 -0500 Subject: [PATCH 3/8] Try fix stuff --- src/stacked_borrows.rs | 17 ++++++----------- src/stacked_borrows/diagnostics.rs | 12 ++---------- 2 files changed, 8 insertions(+), 21 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index d28c6425cd42..75c2ff265879 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -543,16 +543,9 @@ impl<'tcx> Stack { // Now we figure out which item grants our parent (`derived_from`) this kind of access. // We use that to determine where to put the new item. 
let granting_idx = - self.find_granting(access, derived_from, exposed_tags).map_err(|_| { - alloc_history.grant_error( - derived_from, - new, - alloc_id, - alloc_range, - offset, - self, - ) - })?; + self.find_granting(access, derived_from, exposed_tags).map_err(|_| { + alloc_history.grant_error(derived_from, new, alloc_id, alloc_range, offset, self) + })?; // Compute where to put the new item. // Either way, we ensure that we insert the new item in a way such that between @@ -595,7 +588,9 @@ impl<'tcx> Stack { self.borrows.len() }; // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors. - if self.borrows.get(new_idx) == Some(&new) || new_idx > 0 && self.borrows.get(new_idx - 1) == Some(&new) { + if self.borrows.get(new_idx) == Some(&new) + || new_idx > 0 && self.borrows.get(new_idx - 1) == Some(&new) + { // Optimization applies, done. trace!("reborrow: avoiding adding redundant item {:?}", new); } else { diff --git a/src/stacked_borrows/diagnostics.rs b/src/stacked_borrows/diagnostics.rs index 6a22d9a74392..91dfe22c1964 100644 --- a/src/stacked_borrows/diagnostics.rs +++ b/src/stacked_borrows/diagnostics.rs @@ -208,11 +208,7 @@ impl AllocHistory { ) -> InterpError<'tcx> { // TODO: Fix this properly let z = &derived_from; - let f = if let Some(ref t) = z { - t as &dyn Debug - } else { - &"" as &dyn Debug - }; + let f = if let Some(ref t) = z { t as &dyn Debug } else { &"" as &dyn Debug }; let action = format!( "trying to reborrow {:?} for {:?} permission at {}[{:#x}]", f, @@ -240,11 +236,7 @@ impl AllocHistory { stack: &Stack, ) -> InterpError<'tcx> { let z = &tag; - let f = if let Some(ref t) = z { - t as &dyn Debug - } else { - &"" as &dyn Debug - }; + let f = if let Some(ref t) = z { t as &dyn Debug } else { &"" as &dyn Debug }; let action = format!( "attempting a {} using {:?} at {}[{:#x}]", access, From 2deb9e5dae5b6d6b66105fda92e2d30e48a0cfab Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 24 Jun 2022 
16:21:47 -0400 Subject: [PATCH 4/8] add exposed_only_ro test --- tests/fail/stacked_borrows/exposed_only_ro.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 tests/fail/stacked_borrows/exposed_only_ro.rs diff --git a/tests/fail/stacked_borrows/exposed_only_ro.rs b/tests/fail/stacked_borrows/exposed_only_ro.rs new file mode 100644 index 000000000000..b3adb0b855f6 --- /dev/null +++ b/tests/fail/stacked_borrows/exposed_only_ro.rs @@ -0,0 +1,12 @@ +// compile-flags: -Zmiri-permissive-provenance +#![feature(strict_provenance)] + +// If we have only exposed read-only pointers, doing a write through a wildcard ptr should fail. + +fn main() { + let mut x = 0; + let _fool = &mut x as *mut i32; // this would have fooled the old untagged pointer logic + let addr = (&x as *const i32).expose_addr(); + let ptr = std::ptr::from_exposed_addr_mut::(addr); + unsafe { ptr.write(0) }; //~ ERROR: borrow stack +} From c0f7118342ccf77330c92d50222f90856730254b Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 24 Jun 2022 16:45:22 -0400 Subject: [PATCH 5/8] reorganize exposure code a bit --- src/intptrcast.rs | 7 +++++-- src/machine.rs | 18 ++---------------- src/stacked_borrows.rs | 35 ++++++++++++++--------------------- 3 files changed, 21 insertions(+), 39 deletions(-) diff --git a/src/intptrcast.rs b/src/intptrcast.rs index 4a86490ed09a..279bf3d01d2e 100644 --- a/src/intptrcast.rs +++ b/src/intptrcast.rs @@ -101,14 +101,17 @@ impl<'mir, 'tcx> GlobalStateInner { } } - pub fn expose_addr(ecx: &MiriEvalContext<'mir, 'tcx>, alloc_id: AllocId) { + pub fn expose_ptr(ecx: &mut MiriEvalContext<'mir, 'tcx>, alloc_id: AllocId, sb: SbTag) { trace!("Exposing allocation id {:?}", alloc_id); - let mut global_state = ecx.machine.intptrcast.borrow_mut(); + let global_state = ecx.machine.intptrcast.get_mut(); // In legacy and strict mode, we don't need this, so we can save some cycles // by not tracking it. 
if global_state.provenance_mode == ProvenanceMode::Permissive { global_state.exposed.insert(alloc_id); + if ecx.machine.stacked_borrows.is_some() { + ecx.expose_tag(alloc_id, sb); + } } } diff --git a/src/machine.rs b/src/machine.rs index c2a7a34a9cc0..3704a5385141 100644 --- a/src/machine.rs +++ b/src/machine.rs @@ -489,6 +489,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { type AllocExtra = AllocExtra; type PointerTag = Tag; + // `None` represents a wildcard pointer. type TagExtra = Option; type MemoryMap = @@ -709,22 +710,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { ) -> InterpResult<'tcx> { match ptr.provenance { Tag::Concrete(ConcreteTag { alloc_id, sb }) => { - intptrcast::GlobalStateInner::expose_addr(ecx, alloc_id); - - let (size, _) = - ecx.get_alloc_size_and_align(alloc_id, AllocCheck::MaybeDead).unwrap(); - - // Function pointers and dead objects don't have an alloc_extra so we ignore them. - if let Ok(alloc_extra) = ecx.get_alloc_extra(alloc_id) { - if let Some(stacked_borrows) = &alloc_extra.stacked_borrows { - stacked_borrows.ptr_exposed( - alloc_id, - sb, - alloc_range(Size::from_bytes(0), size), - ecx.machine.stacked_borrows.as_ref().unwrap(), - )?; - } - } + intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, sb); } Tag::Wildcard => { // No need to do anything for wildcard pointers as diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 75c2ff265879..b66864b8302a 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -117,7 +117,7 @@ pub struct Stacks { stacks: RefCell>, /// Stores past operations on this allocation history: RefCell, - /// The set of tags that have been exposed + /// The set of tags that have been exposed inside this allocation. 
exposed_tags: RefCell>, } @@ -708,26 +708,6 @@ impl Stacks { stacks } - #[inline(always)] - pub fn ptr_exposed<'tcx>( - &self, - alloc_id: AllocId, - tag: SbTag, - range: AllocRange, - _state: &GlobalState, - ) -> InterpResult<'tcx> { - trace!( - "allocation exposed with tag {:?}: {:?}, size {}", - tag, - Pointer::new(alloc_id, range.start), - range.size.bytes() - ); - - self.exposed_tags.borrow_mut().insert(tag); - - Ok(()) - } - #[inline(always)] pub fn memory_read<'tcx>( &self, @@ -1096,4 +1076,17 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx Ok(()) } + + /// Mark the given tag as exposed. It was found on a pointer with the given AllocId. + fn expose_tag(&mut self, alloc_id: AllocId, tag: SbTag) { + let this = self.eval_context_mut(); + + // Function pointers and dead objects don't have an alloc_extra so we ignore them. + // This is okay because accessing them is UB anyway, no need for any Stacked Borrows checks. + // FIXME: this catches `InterpError`, which we should not usually do. + // We might need a proper fallible API from `memory.rs` to avoid this though. 
+ if let Ok((alloc_extra, _)) = this.get_alloc_extra_mut(alloc_id) { + alloc_extra.stacked_borrows.as_mut().unwrap().exposed_tags.get_mut().insert(tag); + } + } } From 8d6fdaa024138de464a74ebd5307c706c68a3eef Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 24 Jun 2022 18:02:25 -0400 Subject: [PATCH 6/8] make the tests pass (and some formatting) --- README.md | 4 +- src/intptrcast.rs | 3 +- src/lib.rs | 1 + src/stacked_borrows.rs | 223 ++++++++++-------- tests/fail/stacked_borrows/exposed_only_ro.rs | 2 +- .../stacked_borrows/exposed_only_ro.stderr | 18 ++ tests/pass/stacked-borrows/int-to-ptr.rs | 112 ++++----- 7 files changed, 202 insertions(+), 161 deletions(-) create mode 100644 tests/fail/stacked_borrows/exposed_only_ro.stderr diff --git a/README.md b/README.md index 88ac8a135304..f6b2413c65f5 100644 --- a/README.md +++ b/README.md @@ -358,9 +358,7 @@ to Miri failing to detect cases of undefined behavior in a program. for pointer-to-int and int-to-pointer casts, respectively. This will necessarily miss some bugs as those semantics are not efficiently implementable in a sanitizer, but it will only miss bugs that concerns - memory/pointers which is subject to these operations. Also note that this flag - is currently incompatible with Stacked Borrows, so you will have to also pass - `-Zmiri-disable-stacked-borrows` to use this. + memory/pointers which is subject to these operations. * `-Zmiri-symbolic-alignment-check` makes the alignment check more strict. By default, alignment is checked by casting the pointer to an integer, and making sure that is a multiple of the alignment. 
This can lead to cases where a diff --git a/src/intptrcast.rs b/src/intptrcast.rs index 279bf3d01d2e..9ec96b8f491e 100644 --- a/src/intptrcast.rs +++ b/src/intptrcast.rs @@ -102,12 +102,11 @@ impl<'mir, 'tcx> GlobalStateInner { } pub fn expose_ptr(ecx: &mut MiriEvalContext<'mir, 'tcx>, alloc_id: AllocId, sb: SbTag) { - trace!("Exposing allocation id {:?}", alloc_id); - let global_state = ecx.machine.intptrcast.get_mut(); // In legacy and strict mode, we don't need this, so we can save some cycles // by not tracking it. if global_state.provenance_mode == ProvenanceMode::Permissive { + trace!("Exposing allocation id {alloc_id:?}"); global_state.exposed.insert(alloc_id); if ecx.machine.stacked_borrows.is_some() { ecx.expose_tag(alloc_id, sb); diff --git a/src/lib.rs b/src/lib.rs index dc6c0b8dc603..432f469733cb 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -6,6 +6,7 @@ #![feature(let_else)] #![feature(io_error_more)] #![feature(yeet_expr)] +#![feature(is_some_with)] #![warn(rust_2018_idioms)] #![allow( clippy::collapsible_else_if, diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index b66864b8302a..8411d85f0e01 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -104,9 +104,10 @@ pub struct Stack { /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`. /// * Except for `Untagged`, no tag occurs in the stack more than once. borrows: Vec, - /// If this is `Some(id)`, then the actual current stack is unknown. What we do know - /// is that `borrows` are at the top of the stack, and below it are arbitrarily many items - /// whose `tag` is either `Untagged` or strictly less than `id`. + /// If this is `Some(id)`, then the actual current stack is unknown. This can happen when + /// wildcard pointers are used to access this location. What we do know is that `borrows` are at + /// the top of the stack, and below it are arbitrarily many items whose `tag` is either + /// `Untagged` or strictly less than `id`.
unknown_bottom: Option, } @@ -289,72 +290,72 @@ impl Permission { impl<'tcx> Stack { /// Find the item granting the given kind of access to the given tag, and return where /// it is on the stack. - // TODO: Doc ok with Some(index) or None if unknown_bottom used - // Err if does not match + /// `Ok(None)` indicates it matched the "unknown" part of the stack, or it was a wildcard tag + /// and we have no clue what exactly it matched (but it could have matched something) + /// `Err` indicates it was not found. fn find_granting( &self, access: AccessKind, tag: Option, exposed_tags: &FxHashSet, ) -> Result, ()> { - let res = self - .borrows - .iter() - .enumerate() // we also need to know *where* in the stack - .rev() // search top-to-bottom - // Return permission of first item that grants access. - // We require a permission with the right tag, ensuring U3 and F3. - .find_map(|(idx, item)| { - match tag { - Some(tag) if tag == item.tag && item.perm.grants(access) => Some(idx), - None if exposed_tags.contains(&item.tag) => Some(idx), - _ => None, - } - }); + let Some(tag) = tag else { + // Handle the wildcard case. + // Go search the stack for an exposed tag. + let maybe_in_stack = self + .borrows + .iter() + .rev() // search top-to-bottom + .find_map(|item| { + // If the item fits and *might* be this wildcard, use it. + if item.perm.grants(access) && exposed_tags.contains(&item.tag) { + Some(()) + } else { + None + } + }) + .is_some(); + // If we couldn't find it in the stack, check the unknown bottom. + let found = maybe_in_stack || self.unknown_bottom.is_some(); + return if found { Ok(None) } else { Err(()) }; + }; - if res.is_some() { - return Ok(res); + if let Some(idx) = + self.borrows + .iter() + .enumerate() // we also need to know *where* in the stack + .rev() // search top-to-bottom + // Return permission of first item that grants access. + // We require a permission with the right tag, ensuring U3 and F3. 
+ .find_map(|(idx, item)| { + if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None } + }) + { + return Ok(Some(idx)); } - match self.unknown_bottom { - Some(id) => - match tag { - Some(tag) => - match tag { - SbTag::Tagged(tag_id) if tag_id < id => Ok(None), - SbTag::Untagged => Ok(None), - _ => Err(()), - }, - None => Ok(None), - }, - None => Err(()), - } + // Couldn't find it in the stack; but if there is an unknown bottom it might be there. + let found = self.unknown_bottom.is_some_and(|&unknown_limit| { + match tag { + SbTag::Tagged(tag_id) => tag_id < unknown_limit, // unknown_limit is an upper bound for what can be in the unknown bottom. + SbTag::Untagged => true, // yeah whatever + } + }); + if found { Ok(None) } else { Err(()) } } /// Find the first write-incompatible item above the given one -- /// i.e, find the height to which the stack will be truncated when writing to `granting`. - fn find_first_write_incompatible(&self, granting: Option) -> usize { - let perm = if let Some(idx) = granting { - self.borrows[idx].perm - } else { - // I assume this has to be it? - Permission::SharedReadWrite - }; - + fn find_first_write_incompatible(&self, granting: usize) -> usize { + let perm = self.borrows[granting].perm; match perm { Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"), Permission::Disabled => bug!("Cannot use Disabled for anything"), // On a write, everything above us is incompatible. - Permission::Unique => - if let Some(idx) = granting { - idx + 1 - } else { - 0 - }, + Permission::Unique => granting + 1, Permission::SharedReadWrite => { // The SharedReadWrite *just* above us are compatible, to skip those. - let mut idx = if let Some(idx) = granting { idx + 1 } else { 0 }; - + let mut idx = granting + 1; while let Some(item) = self.borrows.get(idx) { if item.perm == Permission::SharedReadWrite { // Go on. 
@@ -440,52 +441,57 @@ impl<'tcx> Stack { alloc_history.access_error(access, tag, alloc_id, alloc_range, offset, self) })?; + let Some(granting_idx) = granting_idx else { + // The access used a wildcard pointer or matched the unknown bottom. + // Nobody knows what happened, so forget everything. + trace!("access: clearing stack due to wildcard"); + self.borrows.clear(); + self.unknown_bottom = Some(global.next_ptr_id); + return Ok(()); + }; + let tag = tag.unwrap(); // only precise tags have precise locations + // Step 2: Remove incompatible items above them. Make sure we do not remove protected // items. Behavior differs for reads and writes. - if let Some(tag) = tag { - if access == AccessKind::Write { - // Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique - // pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses). - let first_incompatible_idx = self.find_first_write_incompatible(granting_idx); - for item in self.borrows.drain(first_incompatible_idx..).rev() { - trace!("access: popping item {:?}", item); + if access == AccessKind::Write { + // Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique + // pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses). + let first_incompatible_idx = self.find_first_write_incompatible(granting_idx); + for item in self.borrows.drain(first_incompatible_idx..).rev() { + trace!("access: popping item {:?}", item); + Stack::check_protector( + &item, + Some((tag, alloc_range, offset, access)), + global, + alloc_history, + )?; + alloc_history.log_invalidation(item.tag, alloc_range, current_span); + } + } else { + // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses. 
+ // The reason this is not following the stack discipline (by removing the first Unique and + // everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement + // would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the + // `SharedReadWrite` for `raw`. + // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared + // reference and use that. + // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs. + let first_incompatible_idx = granting_idx + 1; + for idx in (first_incompatible_idx..self.borrows.len()).rev() { + let item = &mut self.borrows[idx]; + + if item.perm == Permission::Unique { + trace!("access: disabling item {:?}", item); Stack::check_protector( - &item, + item, Some((tag, alloc_range, offset, access)), global, alloc_history, )?; + item.perm = Permission::Disabled; alloc_history.log_invalidation(item.tag, alloc_range, current_span); } - } else { - let start_idx = if let Some(idx) = granting_idx { idx + 1 } else { 0 }; - // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses. - // The reason this is not following the stack discipline (by removing the first Unique and - // everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement - // would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the - // `SharedReadWrite` for `raw`. - // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared - // reference and use that. - // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs. 
- for idx in (start_idx..self.borrows.len()).rev() { - let item = &mut self.borrows[idx]; - - if item.perm == Permission::Unique { - trace!("access: disabling item {:?}", item); - Stack::check_protector( - item, - Some((tag, alloc_range, offset, access)), - global, - alloc_history, - )?; - item.perm = Permission::Disabled; - alloc_history.log_invalidation(item.tag, alloc_range, current_span); - } - } } - } else { - self.borrows.clear(); - self.unknown_bottom = Some(global.next_ptr_id); } // Done. @@ -502,7 +508,7 @@ impl<'tcx> Stack { alloc_history: &mut AllocHistory, exposed_tags: &FxHashSet, ) -> InterpResult<'tcx> { - // Step 1: Find granting item. + // Step 1: Make sure there is a granting item. self.find_granting(AccessKind::Write, tag, exposed_tags).map_err(|_| { err_sb_ub(format!( "no item granting write access for deallocation to tag {:?} at {:?} found in borrow stack", @@ -556,17 +562,20 @@ impl<'tcx> Stack { "this case only makes sense for stack-like accesses" ); - if derived_from.is_some() { - // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write - // access. Instead of popping the stack, we insert the item at the place the stack would - // be popped to (i.e., we insert it above all the write-compatible items). - // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`. - self.find_first_write_incompatible(granting_idx) - } else { - // TODO: is this correct + let Some(granting_idx) = granting_idx else { + // The parent is a wildcard pointer or matched the unknown bottom. + // Nobody knows what happened, so forget everything. + trace!("reborrow: clearing stack due to wildcard"); self.borrows.clear(); - 0 - } + self.unknown_bottom = Some(global.next_ptr_id); + return Ok(()); + }; + + // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write + // access. 
Instead of popping the stack, we insert the item at the place the stack would + // be popped to (i.e., we insert it above all the write-compatible items). + // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`. + self.find_first_write_incompatible(granting_idx) } else { // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. // Here, creating a reference actually counts as an access. @@ -587,9 +596,11 @@ impl<'tcx> Stack { // This ensures U1 and F1. self.borrows.len() }; + // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors. + // `new_idx` might be 0 if we just cleared the entire stack. if self.borrows.get(new_idx) == Some(&new) - || new_idx > 0 && self.borrows.get(new_idx - 1) == Some(&new) + || (new_idx > 0 && self.borrows[new_idx - 1] == new) { // Optimization applies, done. trace!("reborrow: avoiding adding redundant item {:?}", new); @@ -648,7 +659,7 @@ impl<'tcx> Stacks { ) -> InterpResult<'tcx>, ) -> InterpResult<'tcx> { let stacks = self.stacks.get_mut(); - let history = &mut *self.history.borrow_mut(); + let history = &mut *self.history.get_mut(); let exposed_tags = self.exposed_tags.get_mut(); for (offset, stack) in stacks.iter_mut(range.start, range.size) { f(offset, stack, history, exposed_tags)?; @@ -1083,10 +1094,20 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx // Function pointers and dead objects don't have an alloc_extra so we ignore them. // This is okay because accessing them is UB anyway, no need for any Stacked Borrows checks. + // NOT using `get_alloc_extra_mut` since this might be a read-only allocation! // FIXME: this catches `InterpError`, which we should not usually do. // We might need a proper fallible API from `memory.rs` to avoid this though. 
- if let Ok((alloc_extra, _)) = this.get_alloc_extra_mut(alloc_id) { - alloc_extra.stacked_borrows.as_mut().unwrap().exposed_tags.get_mut().insert(tag); + match this.get_alloc_extra(alloc_id) { + Ok(alloc_extra) => { + trace!("Stacked Borrows tag {tag:?} exposed in {alloc_id}"); + alloc_extra.stacked_borrows.as_ref().unwrap().exposed_tags.borrow_mut().insert(tag); + } + Err(err) => { + trace!( + "Not exposing Stacked Borrows tag {tag:?} due to error \ + when accessing {alloc_id}: {err}" + ); + } } } } diff --git a/tests/fail/stacked_borrows/exposed_only_ro.rs b/tests/fail/stacked_borrows/exposed_only_ro.rs index b3adb0b855f6..9b4234499df0 100644 --- a/tests/fail/stacked_borrows/exposed_only_ro.rs +++ b/tests/fail/stacked_borrows/exposed_only_ro.rs @@ -8,5 +8,5 @@ fn main() { let _fool = &mut x as *mut i32; // this would have fooled the old untagged pointer logic let addr = (&x as *const i32).expose_addr(); let ptr = std::ptr::from_exposed_addr_mut::(addr); - unsafe { ptr.write(0) }; //~ ERROR: borrow stack + unsafe { *ptr = 0 }; //~ ERROR: borrow stack } diff --git a/tests/fail/stacked_borrows/exposed_only_ro.stderr b/tests/fail/stacked_borrows/exposed_only_ro.stderr new file mode 100644 index 000000000000..9748fe78c718 --- /dev/null +++ b/tests/fail/stacked_borrows/exposed_only_ro.stderr @@ -0,0 +1,18 @@ +error: Undefined Behavior: attempting a write access using "" at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location + --> $DIR/exposed_only_ro.rs:LL:CC + | +LL | unsafe { *ptr = 0 }; + | ^^^^^^^^ + | | + | attempting a write access using "" at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location + | this error occurs as part of an access at ALLOC[0x0..0x4] + | + = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental + = help: see 
https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information + + = note: inside `main` at $DIR/exposed_only_ro.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to previous error + diff --git a/tests/pass/stacked-borrows/int-to-ptr.rs b/tests/pass/stacked-borrows/int-to-ptr.rs index dc581d8af618..dc3675406276 100644 --- a/tests/pass/stacked-borrows/int-to-ptr.rs +++ b/tests/pass/stacked-borrows/int-to-ptr.rs @@ -13,63 +13,67 @@ fn ref_raw_int_raw() { } /// Ensure that we do not just pick the topmost possible item on int2ptr casts. -fn example(variant: bool) { unsafe { - fn not_so_innocent(x: &mut u32) -> usize { - let x_raw4 = x as *mut u32; - x_raw4.expose_addr() +fn example(variant: bool) { + unsafe { + fn not_so_innocent(x: &mut u32) -> usize { + let x_raw4 = x as *mut u32; + x_raw4.expose_addr() + } + + let mut c = 42u32; + + let x_unique1 = &mut c; + // [..., Unique(1)] + + let x_raw2 = x_unique1 as *mut u32; + let x_raw2_addr = x_raw2.expose_addr(); + // [..., Unique(1), SharedRW(2)] + + let x_unique3 = &mut *x_raw2; + // [.., Unique(1), SharedRW(2), Unique(3)] + + assert_eq!(not_so_innocent(x_unique3), x_raw2_addr); + // [.., Unique(1), SharedRW(2), Unique(3), ..., SharedRW(4)] + + // Do an int2ptr cast. This can pick tag 2 or 4 (the two previously exposed tags). + // 4 is the "obvious" choice (topmost tag, what we used to do with untagged pointers). + // And indeed if `variant == true` it is the only possible choice. + // But if `variant == false` then 2 is the only possible choice! + let x_wildcard = ptr::from_exposed_addr_mut::(x_raw2_addr); + + if variant { + // If we picked 2, this will invalidate 3. + *x_wildcard = 10; + // Now we use 3. Only possible if above we picked 4. + *x_unique3 = 12; + } else { + // This invalidates tag 4. + *x_unique3 = 10; + // Now try to write with the "guessed" tag; it must be 2. 
+ *x_wildcard = 12; + } } +} - let mut c = 42u32; - - let x_unique1 = &mut c; - // [..., Unique(1)] - - let x_raw2 = x_unique1 as *mut u32; - let x_raw2_addr = x_raw2.expose_addr(); - // [..., Unique(1), SharedRW(2)] - - let x_unique3 = &mut *x_raw2; - // [.., Unique(1), SharedRW(2), Unique(3)] - - assert_eq!(not_so_innocent(x_unique3), x_raw2_addr); - // [.., Unique(1), SharedRW(2), Unique(3), ..., SharedRW(4)] - - // Do an int2ptr cast. This can pick tag 2 or 4 (the two previously exposed tags). - // 4 is the "obvious" choice (topmost tag, what we used to do with untagged pointers). - // And indeed if `variant == true` it is the only possible choice. - // But if `variant == false` then 2 is the only possible choice! - let x_wildcard = ptr::from_exposed_addr_mut::(x_raw2_addr); - - if variant { - // If we picked 2, this will invalidate 3. - *x_wildcard = 10; - // Now we use 3. Only possible if above we picked 4. - *x_unique3 = 12; - } else { - // This invalidates tag 4. - *x_unique3 = 10; - // Now try to write with the "guessed" tag; it must be 2. - *x_wildcard = 12; +fn test() { + unsafe { + let root = &mut 42; + let root_raw = root as *mut i32; + let addr1 = root_raw as usize; + let child = &mut *root_raw; + let child_raw = child as *mut i32; + let addr2 = child_raw as usize; + assert_eq!(addr1, addr2); + // First use child. + *(addr2 as *mut i32) -= 2; // picks child_raw + *child -= 2; + // Then use root. + *(addr1 as *mut i32) += 2; // picks root_raw + *root += 2; + // Value should be unchanged. + assert_eq!(*root, 42); } -} } - -fn test() { unsafe { - let root = &mut 42; - let root_raw = root as *mut i32; - let addr1 = root_raw as usize; - let child = &mut *root_raw; - let child_raw = child as *mut i32; - let addr2 = child_raw as usize; - assert_eq!(addr1, addr2); - // First use child. - *(addr2 as *mut i32) -= 2; // picks child_raw - *child -= 2; - // Then use root. - *(addr1 as *mut i32) += 2; // picks root_raw - *root += 2; - // Value should be unchanged. 
- assert_eq!(*root, 42); -} } +} fn main() { ref_raw_int_raw(); From 4fbb284a99d0eceeefcf7f7fdb5c6c545ea68ae2 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 24 Jun 2022 19:45:35 -0400 Subject: [PATCH 7/8] implement 'delimited' expose tracking so we still detect some UB --- src/diagnostics.rs | 8 +- src/intptrcast.rs | 8 +- src/lib.rs | 9 +- src/machine.rs | 34 ++- src/stacked_borrows.rs | 227 ++++++++++++------ src/stacked_borrows/diagnostics.rs | 23 +- .../stacked_borrows/exposed_only_ro.stderr | 4 +- .../illegal_read_despite_exposed1.rs | 17 ++ .../illegal_read_despite_exposed1.stderr | 18 ++ .../illegal_read_despite_exposed2.rs | 18 ++ .../illegal_read_despite_exposed2.stderr | 18 ++ .../illegal_write_despite_exposed1.rs | 16 ++ .../illegal_write_despite_exposed1.stderr | 18 ++ 13 files changed, 293 insertions(+), 125 deletions(-) create mode 100644 tests/fail/stacked_borrows/illegal_read_despite_exposed1.rs create mode 100644 tests/fail/stacked_borrows/illegal_read_despite_exposed1.stderr create mode 100644 tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs create mode 100644 tests/fail/stacked_borrows/illegal_read_despite_exposed2.stderr create mode 100644 tests/fail/stacked_borrows/illegal_write_despite_exposed1.rs create mode 100644 tests/fail/stacked_borrows/illegal_write_despite_exposed1.stderr diff --git a/src/diagnostics.rs b/src/diagnostics.rs index 0e3e693e33f9..230f46c569db 100644 --- a/src/diagnostics.rs +++ b/src/diagnostics.rs @@ -8,7 +8,7 @@ use rustc_middle::ty; use rustc_span::{source_map::DUMMY_SP, Span, SpanData, Symbol}; use crate::helpers::HexRange; -use crate::stacked_borrows::{diagnostics::TagHistory, AccessKind, SbTag}; +use crate::stacked_borrows::{diagnostics::TagHistory, AccessKind}; use crate::*; /// Details of premature program termination. 
@@ -61,9 +61,9 @@ impl MachineStopType for TerminationInfo {} /// Miri specific diagnostics pub enum NonHaltingDiagnostic { CreatedPointerTag(NonZeroU64), - /// This `Item` was popped from the borrow stack, either due to a grant of - /// `AccessKind` to `SbTag` or a deallocation when the second argument is `None`. - PoppedPointerTag(Item, Option<(SbTag, AccessKind)>), + /// This `Item` was popped from the borrow stack, either due to an access with the given tag or + /// a deallocation when the second argument is `None`. + PoppedPointerTag(Item, Option<(SbTagExtra, AccessKind)>), CreatedCallId(CallId), CreatedAlloc(AllocId), FreedAlloc(AllocId), diff --git a/src/intptrcast.rs b/src/intptrcast.rs index 9ec96b8f491e..cfaf61f9d5c8 100644 --- a/src/intptrcast.rs +++ b/src/intptrcast.rs @@ -142,9 +142,7 @@ impl<'mir, 'tcx> GlobalStateInner { // Determine the allocation this points to at cast time. let alloc_id = Self::alloc_id_from_addr(ecx, addr); Pointer::new( - alloc_id.map(|alloc_id| { - Tag::Concrete(ConcreteTag { alloc_id, sb: SbTag::Untagged }) - }), + alloc_id.map(|alloc_id| Tag::Concrete { alloc_id, sb: SbTag::Untagged }), Size::from_bytes(addr), ) } @@ -222,8 +220,8 @@ impl<'mir, 'tcx> GlobalStateInner { ) -> Option<(AllocId, Size)> { let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance) - let alloc_id = if let Tag::Concrete(concrete) = tag { - concrete.alloc_id + let alloc_id = if let Tag::Concrete { alloc_id, .. } = tag { + alloc_id } else { // A wildcard pointer. 
assert_eq!(ecx.machine.intptrcast.borrow().provenance_mode, ProvenanceMode::Permissive); diff --git a/src/lib.rs b/src/lib.rs index 432f469733cb..68489c9b47b9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,6 +7,7 @@ #![feature(io_error_more)] #![feature(yeet_expr)] #![feature(is_some_with)] +#![feature(nonzero_ops)] #![warn(rust_2018_idioms)] #![allow( clippy::collapsible_else_if, @@ -81,15 +82,15 @@ pub use crate::eval::{ pub use crate::helpers::{CurrentSpan, EvalContextExt as HelpersEvalContextExt}; pub use crate::intptrcast::ProvenanceMode; pub use crate::machine::{ - AllocExtra, ConcreteTag, Evaluator, FrameData, MiriEvalContext, MiriEvalContextExt, - MiriMemoryKind, Tag, NUM_CPUS, PAGE_SIZE, STACK_ADDR, STACK_SIZE, + AllocExtra, Evaluator, FrameData, MiriEvalContext, MiriEvalContextExt, MiriMemoryKind, Tag, + NUM_CPUS, PAGE_SIZE, STACK_ADDR, STACK_SIZE, }; pub use crate::mono_hash_map::MonoHashMap; pub use crate::operator::EvalContextExt as OperatorEvalContextExt; pub use crate::range_map::RangeMap; pub use crate::stacked_borrows::{ - CallId, EvalContextExt as StackedBorEvalContextExt, Item, Permission, PtrId, SbTag, Stack, - Stacks, + CallId, EvalContextExt as StackedBorEvalContextExt, Item, Permission, PtrId, SbTag, SbTagExtra, + Stack, Stacks, }; pub use crate::sync::{CondvarId, EvalContextExt as SyncEvalContextExt, MutexId, RwLockId}; pub use crate::thread::{ diff --git a/src/machine.rs b/src/machine.rs index 3704a5385141..5810ac6d7eca 100644 --- a/src/machine.rs +++ b/src/machine.rs @@ -130,17 +130,14 @@ impl fmt::Display for MiriMemoryKind { /// Pointer provenance (tag). #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Tag { - Concrete(ConcreteTag), + Concrete { + alloc_id: AllocId, + /// Stacked Borrows tag. + sb: SbTag, + }, Wildcard, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ConcreteTag { - pub alloc_id: AllocId, - /// Stacked Borrows tag. 
- pub sb: SbTag, -} - #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] static_assert_size!(Pointer, 24); // #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] @@ -160,15 +157,15 @@ impl Provenance for Tag { write!(f, "0x{:x}", addr.bytes())?; match tag { - Tag::Concrete(tag) => { + Tag::Concrete { alloc_id, sb } => { // Forward `alternate` flag to `alloc_id` printing. if f.alternate() { - write!(f, "[{:#?}]", tag.alloc_id)?; + write!(f, "[{:#?}]", alloc_id)?; } else { - write!(f, "[{:?}]", tag.alloc_id)?; + write!(f, "[{:?}]", alloc_id)?; } // Print Stacked Borrows tag. - write!(f, "{:?}", tag.sb)?; + write!(f, "{:?}", sb)?; } Tag::Wildcard => { write!(f, "[Wildcard]")?; @@ -180,7 +177,7 @@ impl Provenance for Tag { fn get_alloc_id(self) -> Option { match self { - Tag::Concrete(concrete) => Some(concrete.alloc_id), + Tag::Concrete { alloc_id, .. } => Some(alloc_id), Tag::Wildcard => None, } } @@ -489,8 +486,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { type AllocExtra = AllocExtra; type PointerTag = Tag; - // `None` represents a wildcard pointer. 
- type TagExtra = Option; + type TagExtra = SbTagExtra; type MemoryMap = MonoHashMap, Allocation)>; @@ -683,7 +679,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { SbTag::Untagged }; Pointer::new( - Tag::Concrete(ConcreteTag { alloc_id: ptr.provenance, sb: sb_tag }), + Tag::Concrete { alloc_id: ptr.provenance, sb: sb_tag }, Size::from_bytes(absolute_addr), ) } @@ -709,7 +705,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { ptr: Pointer, ) -> InterpResult<'tcx> { match ptr.provenance { - Tag::Concrete(ConcreteTag { alloc_id, sb }) => { + Tag::Concrete { alloc_id, sb } => { intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, sb); } Tag::Wildcard => { @@ -730,8 +726,8 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { rel.map(|(alloc_id, size)| { let sb = match ptr.provenance { - Tag::Concrete(ConcreteTag { sb, .. }) => Some(sb), - Tag::Wildcard => None, + Tag::Concrete { sb, .. } => SbTagExtra::Concrete(sb), + Tag::Wildcard => SbTagExtra::Wildcard, }; (alloc_id, size, sb) }) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 8411d85f0e01..b7b339ce77bf 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -3,6 +3,7 @@ use log::trace; use std::cell::RefCell; +use std::cmp; use std::fmt; use std::num::NonZeroU64; @@ -60,6 +61,32 @@ impl fmt::Debug for SbTag { } } +/// The "extra" information an SB pointer has over a regular AllocId. +/// Newtype for `Option`. 
+#[derive(Copy, Clone)] +pub enum SbTagExtra { + Concrete(SbTag), + Wildcard, +} + +impl fmt::Debug for SbTagExtra { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SbTagExtra::Concrete(tag) => write!(f, "{tag:?}"), + SbTagExtra::Wildcard => write!(f, ""), + } + } +} + +impl SbTagExtra { + fn and_then(self, f: impl FnOnce(SbTag) -> Option) -> Option { + match self { + SbTagExtra::Concrete(tag) => f(tag), + SbTagExtra::Wildcard => None, + } + } +} + /// Indicates which permission is granted (by this item to some pointers) #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum Permission { @@ -108,6 +135,8 @@ pub struct Stack { /// wildcard pointers are used to access this location. What we do know is that `borrows` are at /// the top of the stack, and below it are arbitrarily many items whose `tag` is either /// `Untagged` or strictly less than `id`. + /// When the bottom is unknown, `borrows` always has a `SharedReadOnly` or `Unique` at the bottom; + /// we never have the unknown-to-known boundary in an SRW group. unknown_bottom: Option, } @@ -289,35 +318,37 @@ impl Permission { /// Core per-location operations: access, dealloc, reborrow. impl<'tcx> Stack { /// Find the item granting the given kind of access to the given tag, and return where - /// it is on the stack. - /// `Ok(None)` indicates it matched the "unknown" part of the stack, or it was a wildcard tag - /// and we have no clue what exactly it matched (but it could have matched something) + /// it is on the stack. For wildcard tags, the given index is approximate, but if *no* + /// index is given it means the match was *not* in the known part of the stack. + /// `Ok(None)` indicates it matched the "unknown" part of the stack. /// `Err` indicates it was not found. 
fn find_granting( &self, access: AccessKind, - tag: Option, + tag: SbTagExtra, exposed_tags: &FxHashSet, ) -> Result, ()> { - let Some(tag) = tag else { + let SbTagExtra::Concrete(tag) = tag else { // Handle the wildcard case. // Go search the stack for an exposed tag. - let maybe_in_stack = self - .borrows - .iter() - .rev() // search top-to-bottom - .find_map(|item| { - // If the item fits and *might* be this wildcard, use it. - if item.perm.grants(access) && exposed_tags.contains(&item.tag) { - Some(()) - } else { - None - } - }) - .is_some(); + if let Some(idx) = + self.borrows + .iter() + .enumerate() // we also need to know *where* in the stack + .rev() // search top-to-bottom + .find_map(|(idx, item)| { + // If the item fits and *might* be this wildcard, use it. + if item.perm.grants(access) && exposed_tags.contains(&item.tag) { + Some(idx) + } else { + None + } + }) + { + return Ok(Some(idx)); + } // If we couldn't find it in the stack, check the unknown bottom. - let found = maybe_in_stack || self.unknown_bottom.is_some(); - return if found { Ok(None) } else { Err(()) }; + return if self.unknown_bottom.is_some() { Ok(None) } else { Err(()) }; }; if let Some(idx) = @@ -351,8 +382,10 @@ impl<'tcx> Stack { match perm { Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"), Permission::Disabled => bug!("Cannot use Disabled for anything"), - // On a write, everything above us is incompatible. - Permission::Unique => granting + 1, + Permission::Unique => { + // On a write, everything above us is incompatible. + granting + 1 + } Permission::SharedReadWrite => { // The SharedReadWrite *just* above us are compatible, to skip those. let mut idx = granting + 1; @@ -380,7 +413,7 @@ impl<'tcx> Stack { /// currently checking. 
fn check_protector( item: &Item, - provoking_access: Option<(SbTag, AllocRange, Size, AccessKind)>, // just for debug printing and error messages + provoking_access: Option<(SbTagExtra, AllocRange, Size, AccessKind)>, // just for debug printing and error messages global: &GlobalStateInner, alloc_history: &mut AllocHistory, ) -> InterpResult<'tcx> { @@ -401,12 +434,14 @@ impl<'tcx> Stack { tag, item ), None, - alloc_history.get_logs_relevant_to( - tag, - alloc_range, - offset, - Some(item.tag), - ), + tag.and_then(|tag| { + alloc_history.get_logs_relevant_to( + tag, + alloc_range, + offset, + Some(item.tag), + ) + }), ))? } else { Err(err_sb_ub( @@ -427,7 +462,7 @@ impl<'tcx> Stack { fn access( &mut self, access: AccessKind, - tag: Option, + tag: SbTagExtra, (alloc_id, alloc_range, offset): (AllocId, AllocRange, Size), // just for debug printing and error messages global: &mut GlobalStateInner, current_span: &mut CurrentSpan<'_, '_, 'tcx>, @@ -441,22 +476,22 @@ impl<'tcx> Stack { alloc_history.access_error(access, tag, alloc_id, alloc_range, offset, self) })?; - let Some(granting_idx) = granting_idx else { - // The access used a wildcard pointer or matched the unknown bottom. - // Nobody knows what happened, so forget everything. - trace!("access: clearing stack due to wildcard"); - self.borrows.clear(); - self.unknown_bottom = Some(global.next_ptr_id); - return Ok(()); - }; - let tag = tag.unwrap(); // only precise tags have precise locations - // Step 2: Remove incompatible items above them. Make sure we do not remove protected // items. Behavior differs for reads and writes. + // In case of wildcards/unknown matches, we remove everything that is *definitely* gone. if access == AccessKind::Write { // Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique // pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses). 
- let first_incompatible_idx = self.find_first_write_incompatible(granting_idx); + let first_incompatible_idx = if let Some(granting_idx) = granting_idx { + // The granting_idx *might* be approximate, but any lower idx would remove more + // things. Even if this is a Unique and the lower idx is an SRW (which removes + // less), there is an SRW group boundary here so strictly more would get removed. + self.find_first_write_incompatible(granting_idx) + } else { + // We are writing to something in the unknown part. + // There is a SRW group boundary between the unknown and the known, so everything is incompatible. + 0 + }; for item in self.borrows.drain(first_incompatible_idx..).rev() { trace!("access: popping item {:?}", item); Stack::check_protector( @@ -476,7 +511,13 @@ impl<'tcx> Stack { // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared // reference and use that. // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs. - let first_incompatible_idx = granting_idx + 1; + let first_incompatible_idx = if let Some(granting_idx) = granting_idx { + // The granting_idx *might* be approximate, but any lower idx would disable more things. + granting_idx + 1 + } else { + // We are reading from something in the unknown part. That means *all* `Unique` we know about are dead now. + 0 + }; for idx in (first_incompatible_idx..self.borrows.len()).rev() { let item = &mut self.borrows[idx]; @@ -494,6 +535,31 @@ impl<'tcx> Stack { } } + // If this was an approximate action, we now collapse everything into an unknown. + if granting_idx.is_none() || matches!(tag, SbTagExtra::Wildcard) { + // Compute the upper bound of the items that remain. + // (This is why we did all the work above: to reduce the items we have to consider here.) + let mut max = NonZeroU64::new(1).unwrap(); + for item in &self.borrows { + // Skip disabled items, they cannot be matched anyway. 
+ if !matches!(item.perm, Permission::Disabled) { + if let SbTag::Tagged(tag) = item.tag { + // We are looking for a strict upper bound, so add 1 to this tag. + max = cmp::max(tag.checked_add(1).unwrap(), max); + } + } + } + if let Some(unk) = self.unknown_bottom { + max = cmp::max(unk, max); + } + // Use `max` as new strict upper bound for everything. + trace!( + "access: forgetting stack to upper bound {max} due to wildcard or unknown access" + ); + self.borrows.clear(); + self.unknown_bottom = Some(max); + } + // Done. Ok(()) } @@ -502,7 +568,7 @@ impl<'tcx> Stack { /// active protectors at all because we will remove all items. fn dealloc( &mut self, - tag: Option, + tag: SbTagExtra, (alloc_id, alloc_range, offset): (AllocId, AllocRange, Size), // just for debug printing and error messages global: &GlobalStateInner, alloc_history: &mut AllocHistory, @@ -534,7 +600,7 @@ impl<'tcx> Stack { /// `range` that we are currently checking. fn grant( &mut self, - derived_from: Option, + derived_from: SbTagExtra, new: Item, (alloc_id, alloc_range, offset): (AllocId, AllocRange, Size), // just for debug printing and error messages global: &mut GlobalStateInner, @@ -562,10 +628,12 @@ impl<'tcx> Stack { "this case only makes sense for stack-like accesses" ); - let Some(granting_idx) = granting_idx else { + let (Some(granting_idx), SbTagExtra::Concrete(_)) = (granting_idx, derived_from) else { // The parent is a wildcard pointer or matched the unknown bottom. - // Nobody knows what happened, so forget everything. - trace!("reborrow: clearing stack due to wildcard"); + // This is approximate. Nobody knows what happened, so forget everything. + // The new thing is SRW anyway, so we cannot push it "on top of the unkown part" + // (for all we know, it might join an SRW group inside the unknown). 
+ trace!("reborrow: forgetting stack entirely due to SharedReadWrite reborrow from wildcard or unknown"); self.borrows.clear(); self.unknown_bottom = Some(global.next_ptr_id); return Ok(()); @@ -723,7 +791,7 @@ impl Stacks { pub fn memory_read<'tcx>( &self, alloc_id: AllocId, - tag: Option, + tag: SbTagExtra, range: AllocRange, state: &GlobalState, mut current_span: CurrentSpan<'_, '_, 'tcx>, @@ -752,7 +820,7 @@ impl Stacks { pub fn memory_written<'tcx>( &mut self, alloc_id: AllocId, - tag: Option, + tag: SbTagExtra, range: AllocRange, state: &GlobalState, mut current_span: CurrentSpan<'_, '_, 'tcx>, @@ -781,7 +849,7 @@ impl Stacks { pub fn memory_deallocated<'tcx>( &mut self, alloc_id: AllocId, - tag: Option, + tag: SbTagExtra, range: AllocRange, state: &GlobalState, ) -> InterpResult<'tcx> { @@ -798,6 +866,8 @@ impl Stacks { /// to grant for which references, and when to add protectors. impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {} trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> { + /// Returns the `AllocId` the reborrow was done in, if some actual borrow stack manipulation + /// happened. fn reborrow( &mut self, place: &MPlaceTy<'tcx, Tag>, @@ -805,7 +875,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx kind: RefKind, new_tag: SbTag, protect: bool, - ) -> InterpResult<'tcx> { + ) -> InterpResult<'tcx, Option> { let this = self.eval_context_mut(); let current_span = &mut this.machine.current_span(); @@ -815,6 +885,10 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx base_offset, orig_tag| -> InterpResult<'tcx> { + let SbTagExtra::Concrete(orig_tag) = orig_tag else { + // FIXME: should we log this? 
+ return Ok(()) + }; let extra = this.get_alloc_extra(alloc_id)?; let stacked_borrows = extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data"); @@ -832,6 +906,13 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx }; if size == Size::ZERO { + trace!( + "reborrow of size 0: {} reference {:?} derived from {:?} (pointee {})", + kind, + new_tag, + place.ptr, + place.layout.ty, + ); // Don't update any stacks for a zero-sized access; borrow stacks are per-byte and this // touches no bytes so there is no stack to put this tag in. // However, if the pointer for this operation points at a real allocation we still @@ -840,24 +921,14 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx // Dangling slices are a common case here; it's valid to get their length but with raw // pointer tagging for example all calls to get_unchecked on them are invalid. if let Ok((alloc_id, base_offset, orig_tag)) = this.ptr_try_get_alloc_id(place.ptr) { - if let Some(orig_tag) = orig_tag { - log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; - } + log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; + return Ok(Some(alloc_id)); } - - trace!( - "reborrow of size 0: {} reference {:?} derived from {:?} (pointee {})", - kind, - new_tag, - place.ptr, - place.layout.ty, - ); - return Ok(()); + // This pointer doesn't come with an AllocId. :shrug: + return Ok(None); } let (alloc_id, base_offset, orig_tag) = this.ptr_get_alloc_id(place.ptr)?; - if let Some(orig_tag) = orig_tag { - log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; - } + log_creation(this, current_span, alloc_id, base_offset, orig_tag)?; // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050). 
let (alloc_size, _) = @@ -937,7 +1008,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx ) }) })?; - return Ok(()); + return Ok(Some(alloc_id)); } }; // Here we can avoid `borrow()` calls because we have mutable references. @@ -962,7 +1033,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx ) })?; - Ok(()) + Ok(Some(alloc_id)) } /// Retags an indidual pointer, returning the retagged version. @@ -997,16 +1068,22 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx }; // Reborrow. - this.reborrow(&place, size, kind, new_tag, protect)?; + let alloc_id = this.reborrow(&place, size, kind, new_tag, protect)?; // Adjust pointer. let new_place = place.map_provenance(|p| { - p.map(|t| { - // TODO: Fix this eventually - if let Tag::Concrete(t) = t { - Tag::Concrete(ConcreteTag { sb: new_tag, ..t }) - } else { - t + p.map(|prov| { + match alloc_id { + Some(alloc_id) => { + // If `reborrow` could figure out the AllocId of this ptr, hard-code it into the new one. + // Even if we started out with a wildcard, this newly retagged pointer is tied to that allocation. + Tag::Concrete { alloc_id, sb: new_tag } + } + None => { + // Looks like this has to stay a wildcard pointer. 
+ assert!(matches!(prov, Tag::Wildcard)); + Tag::Wildcard + } } }) }); diff --git a/src/stacked_borrows/diagnostics.rs b/src/stacked_borrows/diagnostics.rs index 91dfe22c1964..d3c706c1404a 100644 --- a/src/stacked_borrows/diagnostics.rs +++ b/src/stacked_borrows/diagnostics.rs @@ -4,12 +4,11 @@ use rustc_middle::mir::interpret::{AllocId, AllocRange}; use rustc_span::{Span, SpanData}; use rustc_target::abi::Size; -use core::fmt::Debug; - use crate::helpers::{CurrentSpan, HexRange}; use crate::stacked_borrows::{err_sb_ub, AccessKind, Permission}; use crate::Item; use crate::SbTag; +use crate::SbTagExtra; use crate::Stack; use rustc_middle::mir::interpret::InterpError; @@ -199,19 +198,15 @@ impl AllocHistory { /// Report a descriptive error when `new` could not be granted from `derived_from`. pub fn grant_error<'tcx>( &self, - derived_from: Option, + derived_from: SbTagExtra, new: Item, alloc_id: AllocId, alloc_range: AllocRange, error_offset: Size, stack: &Stack, ) -> InterpError<'tcx> { - // TODO: Fix this properly - let z = &derived_from; - let f = if let Some(ref t) = z { t as &dyn Debug } else { &"" as &dyn Debug }; let action = format!( - "trying to reborrow {:?} for {:?} permission at {}[{:#x}]", - f, + "trying to reborrow {derived_from:?} for {:?} permission at {}[{:#x}]", new.perm, alloc_id, error_offset.bytes(), @@ -229,18 +224,14 @@ impl AllocHistory { pub fn access_error<'tcx>( &self, access: AccessKind, - tag: Option, + tag: SbTagExtra, alloc_id: AllocId, alloc_range: AllocRange, error_offset: Size, stack: &Stack, ) -> InterpError<'tcx> { - let z = &tag; - let f = if let Some(ref t) = z { t as &dyn Debug } else { &"" as &dyn Debug }; let action = format!( - "attempting a {} using {:?} at {}[{:#x}]", - access, - f, + "attempting a {access} using {tag:?} at {}[{:#x}]", alloc_id, error_offset.bytes(), ); @@ -260,8 +251,8 @@ fn operation_summary( format!("this error occurs as part of {} at {:?}{}", operation, alloc_id, HexRange(alloc_range)) } -fn 
error_cause(stack: &Stack, tag: Option) -> &'static str { - if let Some(tag) = tag { +fn error_cause(stack: &Stack, tag: SbTagExtra) -> &'static str { + if let SbTagExtra::Concrete(tag) = tag { if stack.borrows.iter().any(|item| item.tag == tag && item.perm != Permission::Disabled) { ", but that tag only grants SharedReadOnly permission for this location" } else { diff --git a/tests/fail/stacked_borrows/exposed_only_ro.stderr b/tests/fail/stacked_borrows/exposed_only_ro.stderr index 9748fe78c718..ceeca61e5587 100644 --- a/tests/fail/stacked_borrows/exposed_only_ro.stderr +++ b/tests/fail/stacked_borrows/exposed_only_ro.stderr @@ -1,10 +1,10 @@ -error: Undefined Behavior: attempting a write access using "" at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location +error: Undefined Behavior: attempting a write access using at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location --> $DIR/exposed_only_ro.rs:LL:CC | LL | unsafe { *ptr = 0 }; | ^^^^^^^^ | | - | attempting a write access using "" at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location + | attempting a write access using at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location | this error occurs as part of an access at ALLOC[0x0..0x4] | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental diff --git a/tests/fail/stacked_borrows/illegal_read_despite_exposed1.rs b/tests/fail/stacked_borrows/illegal_read_despite_exposed1.rs new file mode 100644 index 000000000000..61a5e05d34cd --- /dev/null +++ b/tests/fail/stacked_borrows/illegal_read_despite_exposed1.rs @@ -0,0 +1,17 @@ +// compile-flags: -Zmiri-permissive-provenance + +fn main() { + unsafe { + let root = &mut 42; + let addr = root as *mut i32 as usize; + let exposed_ptr = addr as *mut i32; + // From the exposed ptr, we get a new unique ptr. 
+ let root2 = &mut *exposed_ptr; + let _fool = root2 as *mut _; // this would have fooled the old untagged pointer logic + // Stack: Unknown( at ALLOC[0x0], but that tag does not exist in the borrow stack for this location + --> $DIR/illegal_read_despite_exposed1.rs:LL:CC + | +LL | let _val = *root2; + | ^^^^^^ + | | + | attempting a read access using at ALLOC[0x0], but that tag does not exist in the borrow stack for this location + | this error occurs as part of an access at ALLOC[0x0..0x4] + | + = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental + = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information + + = note: inside `main` at $DIR/illegal_read_despite_exposed1.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to previous error + diff --git a/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs b/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs new file mode 100644 index 000000000000..b45b7b5d285c --- /dev/null +++ b/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs @@ -0,0 +1,18 @@ +// compile-flags: -Zmiri-permissive-provenance + +fn main() { + unsafe { + let root = &mut 42; + let addr = root as *mut i32 as usize; + let exposed_ptr = addr as *mut i32; + // From the exposed ptr, we get a new unique ptr. 
+ let root2 = &mut *exposed_ptr; + // let _fool = root2 as *mut _; // this would [fool] us, since SRW(N+1) remains on the stack + // Stack: Unknown( at ALLOC[0x0], but that tag does not exist in the borrow stack for this location + --> $DIR/illegal_read_despite_exposed2.rs:LL:CC + | +LL | let _val = *root2; + | ^^^^^^ + | | + | attempting a read access using at ALLOC[0x0], but that tag does not exist in the borrow stack for this location + | this error occurs as part of an access at ALLOC[0x0..0x4] + | + = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental + = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information + + = note: inside `main` at $DIR/illegal_read_despite_exposed2.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to previous error + diff --git a/tests/fail/stacked_borrows/illegal_write_despite_exposed1.rs b/tests/fail/stacked_borrows/illegal_write_despite_exposed1.rs new file mode 100644 index 000000000000..b50399b9df52 --- /dev/null +++ b/tests/fail/stacked_borrows/illegal_write_despite_exposed1.rs @@ -0,0 +1,16 @@ +// compile-flags: -Zmiri-permissive-provenance + +fn main() { + unsafe { + let root = &mut 42; + let addr = root as *mut i32 as usize; + let exposed_ptr = addr as *mut i32; + // From the exposed ptr, we get a new SRO ptr. 
+ let root2 = &*exposed_ptr; + // Stack: Unknown( at ALLOC[0x0], but that tag does not exist in the borrow stack for this location + --> $DIR/illegal_write_despite_exposed1.rs:LL:CC + | +LL | let _val = *root2; + | ^^^^^^ + | | + | attempting a read access using at ALLOC[0x0], but that tag does not exist in the borrow stack for this location + | this error occurs as part of an access at ALLOC[0x0..0x4] + | + = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental + = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information + + = note: inside `main` at $DIR/illegal_write_despite_exposed1.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to previous error + From 58c79c5b6f2717241ec14aa61442b98ca8b66d00 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 24 Jun 2022 22:02:17 -0400 Subject: [PATCH 8/8] tweaks and feedback --- src/machine.rs | 8 +- src/stacked_borrows.rs | 84 ++++++++----------- src/stacked_borrows/diagnostics.rs | 2 +- .../stacked_borrows/exposed_only_ro.stderr | 4 +- .../illegal_read_despite_exposed2.rs | 10 ++- 5 files changed, 48 insertions(+), 60 deletions(-) diff --git a/src/machine.rs b/src/machine.rs index 5810ac6d7eca..79414ada5eac 100644 --- a/src/machine.rs +++ b/src/machine.rs @@ -646,7 +646,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { let alloc: Allocation = alloc.convert_tag_add_extra( &ecx.tcx, AllocExtra { - stacked_borrows: stacks, + stacked_borrows: stacks.map(RefCell::new), data_race: race_alloc, weak_memory: buffer_alloc, }, @@ -745,7 +745,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { data_race.read(alloc_id, range, machine.data_race.as_ref().unwrap())?; } if let Some(stacked_borrows) = &alloc_extra.stacked_borrows { - 
stacked_borrows.memory_read( + stacked_borrows.borrow_mut().memory_read( alloc_id, tag, range, @@ -771,7 +771,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { data_race.write(alloc_id, range, machine.data_race.as_mut().unwrap())?; } if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows { - stacked_borrows.memory_written( + stacked_borrows.get_mut().memory_written( alloc_id, tag, range, @@ -800,7 +800,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> { data_race.deallocate(alloc_id, range, machine.data_race.as_mut().unwrap())?; } if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows { - stacked_borrows.memory_deallocated( + stacked_borrows.get_mut().memory_deallocated( alloc_id, tag, range, diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index b7b339ce77bf..6fa70ddfc5d4 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -25,7 +25,8 @@ use diagnostics::{AllocHistory, TagHistory}; pub type PtrId = NonZeroU64; pub type CallId = NonZeroU64; -pub type AllocExtra = Stacks; +// Even reading memory can have effects on the stack, so we need a `RefCell` here. +pub type AllocExtra = RefCell; /// Tracking pointer provenance #[derive(Copy, Clone, Hash, Eq)] @@ -131,7 +132,7 @@ pub struct Stack { /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`. /// * Except for `Untagged`, no tag occurs in the stack more than once. borrows: Vec, - /// If this is `Some(id)`, then the actual current stack is unknown. THis can happen when + /// If this is `Some(id)`, then the actual current stack is unknown. This can happen when /// wildcard pointers are used to access this location. What we do know is that `borrows` are at /// the top of the stack, and below it are arbitrarily many items whose `tag` is either /// `Untagged` or strictly less than `id`. 
@@ -144,11 +145,11 @@ pub struct Stack { #[derive(Clone, Debug)] pub struct Stacks { // Even reading memory can have effects on the stack, so we need a `RefCell` here. - stacks: RefCell>, + stacks: RangeMap, /// Stores past operations on this allocation - history: RefCell, + history: AllocHistory, /// The set of tags that have been exposed inside this allocation. - exposed_tags: RefCell>, + exposed_tags: FxHashSet, } /// Extra global state, available to the memory access hooks. @@ -689,34 +690,14 @@ impl<'tcx> Stacks { let stack = Stack { borrows: vec![item], unknown_bottom: None }; Stacks { - stacks: RefCell::new(RangeMap::new(size, stack)), - history: RefCell::new(AllocHistory::new()), - exposed_tags: RefCell::new(FxHashSet::default()), + stacks: RangeMap::new(size, stack), + history: AllocHistory::new(), + exposed_tags: FxHashSet::default(), } } /// Call `f` on every stack in the range. fn for_each( - &self, - range: AllocRange, - mut f: impl FnMut( - Size, - &mut Stack, - &mut AllocHistory, - &mut FxHashSet, - ) -> InterpResult<'tcx>, - ) -> InterpResult<'tcx> { - let mut stacks = self.stacks.borrow_mut(); - let history = &mut *self.history.borrow_mut(); - let exposed_tags = &mut *self.exposed_tags.borrow_mut(); - for (offset, stack) in stacks.iter_mut(range.start, range.size) { - f(offset, stack, history, exposed_tags)?; - } - Ok(()) - } - - /// Call `f` on every stack in the range. 
- fn for_each_mut( &mut self, range: AllocRange, mut f: impl FnMut( @@ -726,11 +707,8 @@ impl<'tcx> Stacks { &mut FxHashSet, ) -> InterpResult<'tcx>, ) -> InterpResult<'tcx> { - let stacks = self.stacks.get_mut(); - let history = &mut *self.history.get_mut(); - let exposed_tags = self.exposed_tags.get_mut(); - for (offset, stack) in stacks.iter_mut(range.start, range.size) { - f(offset, stack, history, exposed_tags)?; + for (offset, stack) in self.stacks.iter_mut(range.start, range.size) { + f(offset, stack, &mut self.history, &mut self.exposed_tags)?; } Ok(()) } @@ -777,8 +755,8 @@ impl Stacks { (tag, Permission::SharedReadWrite) } }; - let stacks = Stacks::new(size, perm, base_tag); - stacks.history.borrow_mut().log_creation( + let mut stacks = Stacks::new(size, perm, base_tag); + stacks.history.log_creation( None, base_tag, alloc_range(Size::ZERO, size), @@ -789,7 +767,7 @@ impl Stacks { #[inline(always)] pub fn memory_read<'tcx>( - &self, + &mut self, alloc_id: AllocId, tag: SbTagExtra, range: AllocRange, @@ -832,7 +810,7 @@ impl Stacks { range.size.bytes() ); let mut state = state.borrow_mut(); - self.for_each_mut(range, |offset, stack, history, exposed_tags| { + self.for_each(range, |offset, stack, history, exposed_tags| { stack.access( AccessKind::Write, tag, @@ -855,7 +833,7 @@ impl Stacks { ) -> InterpResult<'tcx> { trace!("deallocation with tag {:?}: {:?}, size {}", tag, alloc_id, range.size.bytes()); let state = state.borrow(); - self.for_each_mut(range, |offset, stack, history, exposed_tags| { + self.for_each(range, |offset, stack, history, exposed_tags| { stack.dealloc(tag, (alloc_id, range, offset), &state, history, exposed_tags) })?; Ok(()) @@ -890,17 +868,19 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx return Ok(()) }; let extra = this.get_alloc_extra(alloc_id)?; - let stacked_borrows = - extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data"); - let mut alloc_history = 
stacked_borrows.history.borrow_mut(); - alloc_history.log_creation( + let mut stacked_borrows = extra + .stacked_borrows + .as_ref() + .expect("we should have Stacked Borrows data") + .borrow_mut(); + stacked_borrows.history.log_creation( Some(orig_tag), new_tag, alloc_range(base_offset, size), current_span, ); if protect { - alloc_history.log_protector(orig_tag, new_tag, current_span); + stacked_borrows.history.log_protector(orig_tag, new_tag, current_span); } Ok(()) }; @@ -976,8 +956,11 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx // We have to use shared references to alloc/memory_extra here since // `visit_freeze_sensitive` needs to access the global state. let extra = this.get_alloc_extra(alloc_id)?; - let stacked_borrows = - extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data"); + let mut stacked_borrows = extra + .stacked_borrows + .as_ref() + .expect("we should have Stacked Borrows data") + .borrow_mut(); this.visit_freeze_sensitive(place, size, |mut range, frozen| { // Adjust range. range.start += base_offset; @@ -1015,13 +998,16 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx // Note that this asserts that the allocation is mutable -- but since we are creating a // mutable pointer, that seems reasonable. 
let (alloc_extra, machine) = this.get_alloc_extra_mut(alloc_id)?; - let stacked_borrows = - alloc_extra.stacked_borrows.as_mut().expect("we should have Stacked Borrows data"); + let mut stacked_borrows = alloc_extra + .stacked_borrows + .as_mut() + .expect("we should have Stacked Borrows data") + .borrow_mut(); let item = Item { perm, tag: new_tag, protector }; let range = alloc_range(base_offset, size); let mut global = machine.stacked_borrows.as_ref().unwrap().borrow_mut(); let current_span = &mut machine.current_span(); // `get_alloc_extra_mut` invalidated our old `current_span` - stacked_borrows.for_each_mut(range, |offset, stack, history, exposed_tags| { + stacked_borrows.for_each(range, |offset, stack, history, exposed_tags| { stack.grant( orig_tag, item, @@ -1177,7 +1163,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx match this.get_alloc_extra(alloc_id) { Ok(alloc_extra) => { trace!("Stacked Borrows tag {tag:?} exposed in {alloc_id}"); - alloc_extra.stacked_borrows.as_ref().unwrap().exposed_tags.borrow_mut().insert(tag); + alloc_extra.stacked_borrows.as_ref().unwrap().borrow_mut().exposed_tags.insert(tag); } Err(err) => { trace!( diff --git a/src/stacked_borrows/diagnostics.rs b/src/stacked_borrows/diagnostics.rs index d3c706c1404a..a4a7f5e7a1e3 100644 --- a/src/stacked_borrows/diagnostics.rs +++ b/src/stacked_borrows/diagnostics.rs @@ -259,6 +259,6 @@ fn error_cause(stack: &Stack, tag: SbTagExtra) -> &'static str { ", but that tag does not exist in the borrow stack for this location" } } else { - ", but no exposed tags are valid in the borrow stack for this location" + ", but no exposed tags have suitable permission in the borrow stack for this location" } } diff --git a/tests/fail/stacked_borrows/exposed_only_ro.stderr b/tests/fail/stacked_borrows/exposed_only_ro.stderr index ceeca61e5587..28fa98b6020c 100644 --- a/tests/fail/stacked_borrows/exposed_only_ro.stderr +++ 
b/tests/fail/stacked_borrows/exposed_only_ro.stderr @@ -1,10 +1,10 @@ -error: Undefined Behavior: attempting a write access using <wildcard> at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location +error: Undefined Behavior: attempting a write access using <wildcard> at ALLOC[0x0], but no exposed tags have suitable permission in the borrow stack for this location --> $DIR/exposed_only_ro.rs:LL:CC | LL | unsafe { *ptr = 0 }; | ^^^^^^^^ | | - | attempting a write access using <wildcard> at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location + | attempting a write access using <wildcard> at ALLOC[0x0], but no exposed tags have suitable permission in the borrow stack for this location | this error occurs as part of an access at ALLOC[0x0..0x4] | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental diff --git a/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs b/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs index b45b7b5d285c..19d0784591e4 100644 --- a/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs +++ b/tests/fail/stacked_borrows/illegal_read_despite_exposed2.rs @@ -7,12 +7,14 @@ fn main() { let exposed_ptr = addr as *mut i32; // From the exposed ptr, we get a new unique ptr. let root2 = &mut *exposed_ptr; - // let _fool = root2 as *mut _; // this would [fool] us, since SRW(N+1) remains on the stack - // Stack: Unknown(