make the tests pass

(and some formatting)
This commit is contained in:
Ralf Jung 2022-06-24 18:02:25 -04:00
parent c0f7118342
commit 8d6fdaa024
7 changed files with 202 additions and 161 deletions

View file

@ -358,9 +358,7 @@ to Miri failing to detect cases of undefined behavior in a program.
for pointer-to-int and int-to-pointer casts, respectively. This will
necessarily miss some bugs as those semantics are not efficiently
implementable in a sanitizer, but it will only miss bugs that concern
memory/pointers which is subject to these operations. Also note that this flag
is currently incompatible with Stacked Borrows, so you will have to also pass
`-Zmiri-disable-stacked-borrows` to use this.
memory/pointers which are subject to these operations.
* `-Zmiri-symbolic-alignment-check` makes the alignment check more strict. By
default, alignment is checked by casting the pointer to an integer, and making
sure that is a multiple of the alignment. This can lead to cases where a

View file

@ -102,12 +102,11 @@ impl<'mir, 'tcx> GlobalStateInner {
}
pub fn expose_ptr(ecx: &mut MiriEvalContext<'mir, 'tcx>, alloc_id: AllocId, sb: SbTag) {
trace!("Exposing allocation id {:?}", alloc_id);
let global_state = ecx.machine.intptrcast.get_mut();
// In legacy and strict mode, we don't need this, so we can save some cycles
// by not tracking it.
if global_state.provenance_mode == ProvenanceMode::Permissive {
trace!("Exposing allocation id {alloc_id:?}");
global_state.exposed.insert(alloc_id);
if ecx.machine.stacked_borrows.is_some() {
ecx.expose_tag(alloc_id, sb);

View file

@ -6,6 +6,7 @@
#![feature(let_else)]
#![feature(io_error_more)]
#![feature(yeet_expr)]
#![feature(is_some_with)]
#![warn(rust_2018_idioms)]
#![allow(
clippy::collapsible_else_if,

View file

@ -104,9 +104,10 @@ pub struct Stack {
/// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
/// * Except for `Untagged`, no tag occurs in the stack more than once.
borrows: Vec<Item>,
/// If this is `Some(id)`, then the actual current stack is unknown. What we do know
/// is that `borrows` are at the top of the stack, and below it are arbitrarily many items
/// whose `tag` is either `Untagged` or strictly less than `id`.
/// If this is `Some(id)`, then the actual current stack is unknown. This can happen when
/// wildcard pointers are used to access this location. What we do know is that `borrows` are at
/// the top of the stack, and below it are arbitrarily many items whose `tag` is either
/// `Untagged` or strictly less than `id`.
unknown_bottom: Option<PtrId>,
}
@ -289,72 +290,72 @@ impl Permission {
impl<'tcx> Stack {
/// Find the item granting the given kind of access to the given tag, and return where
/// it is on the stack.
// TODO: Doc ok with Some(index) or None if unknown_bottom used
// Err if does not match
/// `Ok(None)` indicates it matched the "unknown" part of the stack, or it was a wildcard tag
/// and we have no clue what exactly it matched (but it could have matched something).
/// `Err` indicates it was not found.
fn find_granting(
&self,
access: AccessKind,
tag: Option<SbTag>,
exposed_tags: &FxHashSet<SbTag>,
) -> Result<Option<usize>, ()> {
let res = self
.borrows
.iter()
.enumerate() // we also need to know *where* in the stack
.rev() // search top-to-bottom
// Return permission of first item that grants access.
// We require a permission with the right tag, ensuring U3 and F3.
.find_map(|(idx, item)| {
match tag {
Some(tag) if tag == item.tag && item.perm.grants(access) => Some(idx),
None if exposed_tags.contains(&item.tag) => Some(idx),
_ => None,
}
});
let Some(tag) = tag else {
// Handle the wildcard case.
// Go search the stack for an exposed tag.
let maybe_in_stack = self
.borrows
.iter()
.rev() // search top-to-bottom
.find_map(|item| {
// If the item fits and *might* be this wildcard, use it.
if item.perm.grants(access) && exposed_tags.contains(&item.tag) {
Some(())
} else {
None
}
})
.is_some();
// If we couldn't find it in the stack, check the unknown bottom.
let found = maybe_in_stack || self.unknown_bottom.is_some();
return if found { Ok(None) } else { Err(()) };
};
if res.is_some() {
return Ok(res);
if let Some(idx) =
self.borrows
.iter()
.enumerate() // we also need to know *where* in the stack
.rev() // search top-to-bottom
// Return permission of first item that grants access.
// We require a permission with the right tag, ensuring U3 and F3.
.find_map(|(idx, item)| {
if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
})
{
return Ok(Some(idx));
}
match self.unknown_bottom {
Some(id) =>
match tag {
Some(tag) =>
match tag {
SbTag::Tagged(tag_id) if tag_id < id => Ok(None),
SbTag::Untagged => Ok(None),
_ => Err(()),
},
None => Ok(None),
},
None => Err(()),
}
// Couldn't find it in the stack; but if there is an unknown bottom it might be there.
let found = self.unknown_bottom.is_some_and(|&unknown_limit| {
match tag {
SbTag::Tagged(tag_id) => tag_id < unknown_limit, // unknown_limit is an upper bound for what can be in the unknown bottom.
SbTag::Untagged => true, // yeah whatever
}
});
if found { Ok(None) } else { Err(()) }
}
/// Find the first write-incompatible item above the given one --
/// i.e, find the height to which the stack will be truncated when writing to `granting`.
fn find_first_write_incompatible(&self, granting: Option<usize>) -> usize {
let perm = if let Some(idx) = granting {
self.borrows[idx].perm
} else {
// I assume this has to be it?
Permission::SharedReadWrite
};
fn find_first_write_incompatible(&self, granting: usize) -> usize {
let perm = self.borrows[granting].perm;
match perm {
Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"),
Permission::Disabled => bug!("Cannot use Disabled for anything"),
// On a write, everything above us is incompatible.
Permission::Unique =>
if let Some(idx) = granting {
idx + 1
} else {
0
},
Permission::Unique => granting + 1,
Permission::SharedReadWrite => {
// The SharedReadWrite *just* above us are compatible, to skip those.
let mut idx = if let Some(idx) = granting { idx + 1 } else { 0 };
let mut idx = granting + 1;
while let Some(item) = self.borrows.get(idx) {
if item.perm == Permission::SharedReadWrite {
// Go on.
@ -440,52 +441,57 @@ impl<'tcx> Stack {
alloc_history.access_error(access, tag, alloc_id, alloc_range, offset, self)
})?;
let Some(granting_idx) = granting_idx else {
// The access used a wildcard pointer or matched the unknown bottom.
// Nobody knows what happened, so forget everything.
trace!("access: clearing stack due to wildcard");
self.borrows.clear();
self.unknown_bottom = Some(global.next_ptr_id);
return Ok(());
};
let tag = tag.unwrap(); // only precise tags have precise locations
// Step 2: Remove incompatible items above them. Make sure we do not remove protected
// items. Behavior differs for reads and writes.
if let Some(tag) = tag {
if access == AccessKind::Write {
// Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique
// pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses).
let first_incompatible_idx = self.find_first_write_incompatible(granting_idx);
for item in self.borrows.drain(first_incompatible_idx..).rev() {
trace!("access: popping item {:?}", item);
if access == AccessKind::Write {
// Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique
// pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses).
let first_incompatible_idx = self.find_first_write_incompatible(granting_idx);
for item in self.borrows.drain(first_incompatible_idx..).rev() {
trace!("access: popping item {:?}", item);
Stack::check_protector(
&item,
Some((tag, alloc_range, offset, access)),
global,
alloc_history,
)?;
alloc_history.log_invalidation(item.tag, alloc_range, current_span);
}
} else {
// On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
// The reason this is not following the stack discipline (by removing the first Unique and
// everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement
// would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the
// `SharedReadWrite` for `raw`.
// This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
// reference and use that.
// We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
let first_incompatible_idx = granting_idx + 1;
for idx in (first_incompatible_idx..self.borrows.len()).rev() {
let item = &mut self.borrows[idx];
if item.perm == Permission::Unique {
trace!("access: disabling item {:?}", item);
Stack::check_protector(
&item,
item,
Some((tag, alloc_range, offset, access)),
global,
alloc_history,
)?;
item.perm = Permission::Disabled;
alloc_history.log_invalidation(item.tag, alloc_range, current_span);
}
} else {
let start_idx = if let Some(idx) = granting_idx { idx + 1 } else { 0 };
// On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
// The reason this is not following the stack discipline (by removing the first Unique and
// everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement
// would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the
// `SharedReadWrite` for `raw`.
// This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
// reference and use that.
// We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
for idx in (start_idx..self.borrows.len()).rev() {
let item = &mut self.borrows[idx];
if item.perm == Permission::Unique {
trace!("access: disabling item {:?}", item);
Stack::check_protector(
item,
Some((tag, alloc_range, offset, access)),
global,
alloc_history,
)?;
item.perm = Permission::Disabled;
alloc_history.log_invalidation(item.tag, alloc_range, current_span);
}
}
}
} else {
self.borrows.clear();
self.unknown_bottom = Some(global.next_ptr_id);
}
// Done.
@ -502,7 +508,7 @@ impl<'tcx> Stack {
alloc_history: &mut AllocHistory,
exposed_tags: &FxHashSet<SbTag>,
) -> InterpResult<'tcx> {
// Step 1: Find granting item.
// Step 1: Make sure there is a granting item.
self.find_granting(AccessKind::Write, tag, exposed_tags).map_err(|_| {
err_sb_ub(format!(
"no item granting write access for deallocation to tag {:?} at {:?} found in borrow stack",
@ -556,17 +562,20 @@ impl<'tcx> Stack {
"this case only makes sense for stack-like accesses"
);
if derived_from.is_some() {
// SharedReadWrite can coexist with "existing loans", meaning they don't act like a write
// access. Instead of popping the stack, we insert the item at the place the stack would
// be popped to (i.e., we insert it above all the write-compatible items).
// This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`.
self.find_first_write_incompatible(granting_idx)
} else {
// TODO: is this correct
let Some(granting_idx) = granting_idx else {
// The parent is a wildcard pointer or matched the unknown bottom.
// Nobody knows what happened, so forget everything.
trace!("reborrow: clearing stack due to wildcard");
self.borrows.clear();
0
}
self.unknown_bottom = Some(global.next_ptr_id);
return Ok(());
};
// SharedReadWrite can coexist with "existing loans", meaning they don't act like a write
// access. Instead of popping the stack, we insert the item at the place the stack would
// be popped to (i.e., we insert it above all the write-compatible items).
// This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`.
self.find_first_write_incompatible(granting_idx)
} else {
// A "safe" reborrow for a pointer that actually expects some aliasing guarantees.
// Here, creating a reference actually counts as an access.
@ -587,9 +596,11 @@ impl<'tcx> Stack {
// This ensures U1 and F1.
self.borrows.len()
};
// Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors.
// `new_idx` might be 0 if we just cleared the entire stack.
if self.borrows.get(new_idx) == Some(&new)
|| new_idx > 0 && self.borrows.get(new_idx - 1) == Some(&new)
|| (new_idx > 0 && self.borrows[new_idx - 1] == new)
{
// Optimization applies, done.
trace!("reborrow: avoiding adding redundant item {:?}", new);
@ -648,7 +659,7 @@ impl<'tcx> Stacks {
) -> InterpResult<'tcx>,
) -> InterpResult<'tcx> {
let stacks = self.stacks.get_mut();
let history = &mut *self.history.borrow_mut();
let history = &mut *self.history.get_mut();
let exposed_tags = self.exposed_tags.get_mut();
for (offset, stack) in stacks.iter_mut(range.start, range.size) {
f(offset, stack, history, exposed_tags)?;
@ -1083,10 +1094,20 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
// Function pointers and dead objects don't have an alloc_extra so we ignore them.
// This is okay because accessing them is UB anyway, no need for any Stacked Borrows checks.
// NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
// FIXME: this catches `InterpError`, which we should not usually do.
// We might need a proper fallible API from `memory.rs` to avoid this though.
if let Ok((alloc_extra, _)) = this.get_alloc_extra_mut(alloc_id) {
alloc_extra.stacked_borrows.as_mut().unwrap().exposed_tags.get_mut().insert(tag);
match this.get_alloc_extra(alloc_id) {
Ok(alloc_extra) => {
trace!("Stacked Borrows tag {tag:?} exposed in {alloc_id}");
alloc_extra.stacked_borrows.as_ref().unwrap().exposed_tags.borrow_mut().insert(tag);
}
Err(err) => {
trace!(
"Not exposing Stacked Borrows tag {tag:?} due to error \
when accessing {alloc_id}: {err}"
);
}
}
}
}

View file

@ -8,5 +8,5 @@ fn main() {
let _fool = &mut x as *mut i32; // this would have fooled the old untagged pointer logic
let addr = (&x as *const i32).expose_addr();
let ptr = std::ptr::from_exposed_addr_mut::<i32>(addr);
unsafe { ptr.write(0) }; //~ ERROR: borrow stack
unsafe { *ptr = 0 }; //~ ERROR: borrow stack
}

View file

@ -0,0 +1,18 @@
error: Undefined Behavior: attempting a write access using "<wildcard>" at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location
--> $DIR/exposed_only_ro.rs:LL:CC
|
LL | unsafe { *ptr = 0 };
| ^^^^^^^^
| |
| attempting a write access using "<wildcard>" at ALLOC[0x0], but no exposed tags are valid in the borrow stack for this location
| this error occurs as part of an access at ALLOC[0x0..0x4]
|
= help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
= help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
= note: inside `main` at $DIR/exposed_only_ro.rs:LL:CC
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
error: aborting due to previous error

View file

@ -13,63 +13,67 @@ fn ref_raw_int_raw() {
}
/// Ensure that we do not just pick the topmost possible item on int2ptr casts.
fn example(variant: bool) { unsafe {
fn not_so_innocent(x: &mut u32) -> usize {
let x_raw4 = x as *mut u32;
x_raw4.expose_addr()
fn example(variant: bool) {
unsafe {
fn not_so_innocent(x: &mut u32) -> usize {
let x_raw4 = x as *mut u32;
x_raw4.expose_addr()
}
let mut c = 42u32;
let x_unique1 = &mut c;
// [..., Unique(1)]
let x_raw2 = x_unique1 as *mut u32;
let x_raw2_addr = x_raw2.expose_addr();
// [..., Unique(1), SharedRW(2)]
let x_unique3 = &mut *x_raw2;
// [.., Unique(1), SharedRW(2), Unique(3)]
assert_eq!(not_so_innocent(x_unique3), x_raw2_addr);
// [.., Unique(1), SharedRW(2), Unique(3), ..., SharedRW(4)]
// Do an int2ptr cast. This can pick tag 2 or 4 (the two previously exposed tags).
// 4 is the "obvious" choice (topmost tag, what we used to do with untagged pointers).
// And indeed if `variant == true` it is the only possible choice.
// But if `variant == false` then 2 is the only possible choice!
let x_wildcard = ptr::from_exposed_addr_mut::<i32>(x_raw2_addr);
if variant {
// If we picked 2, this will invalidate 3.
*x_wildcard = 10;
// Now we use 3. Only possible if above we picked 4.
*x_unique3 = 12;
} else {
// This invalidates tag 4.
*x_unique3 = 10;
// Now try to write with the "guessed" tag; it must be 2.
*x_wildcard = 12;
}
}
}
let mut c = 42u32;
let x_unique1 = &mut c;
// [..., Unique(1)]
let x_raw2 = x_unique1 as *mut u32;
let x_raw2_addr = x_raw2.expose_addr();
// [..., Unique(1), SharedRW(2)]
let x_unique3 = &mut *x_raw2;
// [.., Unique(1), SharedRW(2), Unique(3)]
assert_eq!(not_so_innocent(x_unique3), x_raw2_addr);
// [.., Unique(1), SharedRW(2), Unique(3), ..., SharedRW(4)]
// Do an int2ptr cast. This can pick tag 2 or 4 (the two previously exposed tags).
// 4 is the "obvious" choice (topmost tag, what we used to do with untagged pointers).
// And indeed if `variant == true` it is the only possible choice.
// But if `variant == false` then 2 is the only possible choice!
let x_wildcard = ptr::from_exposed_addr_mut::<i32>(x_raw2_addr);
if variant {
// If we picked 2, this will invalidate 3.
*x_wildcard = 10;
// Now we use 3. Only possible if above we picked 4.
*x_unique3 = 12;
} else {
// This invalidates tag 4.
*x_unique3 = 10;
// Now try to write with the "guessed" tag; it must be 2.
*x_wildcard = 12;
fn test() {
unsafe {
let root = &mut 42;
let root_raw = root as *mut i32;
let addr1 = root_raw as usize;
let child = &mut *root_raw;
let child_raw = child as *mut i32;
let addr2 = child_raw as usize;
assert_eq!(addr1, addr2);
// First use child.
*(addr2 as *mut i32) -= 2; // picks child_raw
*child -= 2;
// Then use root.
*(addr1 as *mut i32) += 2; // picks root_raw
*root += 2;
// Value should be unchanged.
assert_eq!(*root, 42);
}
} }
fn test() { unsafe {
let root = &mut 42;
let root_raw = root as *mut i32;
let addr1 = root_raw as usize;
let child = &mut *root_raw;
let child_raw = child as *mut i32;
let addr2 = child_raw as usize;
assert_eq!(addr1, addr2);
// First use child.
*(addr2 as *mut i32) -= 2; // picks child_raw
*child -= 2;
// Then use root.
*(addr1 as *mut i32) += 2; // picks root_raw
*root += 2;
// Value should be unchanged.
assert_eq!(*root, 42);
} }
}
fn main() {
ref_raw_int_raw();