Merge branch 'master' into format-temporaries

This commit is contained in:
Jon Gjengset 2019-09-28 09:25:47 -04:00
commit c7f6df0d5a
110 changed files with 619 additions and 6620 deletions

View file

@ -69,6 +69,7 @@ David Manescu <david.manescu@gmail.com> <dman2626@uni.sydney.edu.au>
David Ross <daboross@daboross.net>
Derek Chiang <derekchiang93@gmail.com> Derek Chiang (Enchi Jiang) <derekchiang93@gmail.com>
Diggory Hardy <diggory.hardy@gmail.com> Diggory Hardy <github@dhardy.name>
Dustin Bensing <dustin.bensing@googlemail.com>
Dylan Braithwaite <dylanbraithwaite1@gmail.com> <mail@dylanb.me>
Dzmitry Malyshau <kvarkus@gmail.com>
E. Dunham <edunham@mozilla.com> edunham <edunham@mozilla.com>

View file

@ -534,7 +534,7 @@ name = "compiletest"
version = "0.0.0"
dependencies = [
"diff",
"env_logger 0.6.2",
"env_logger 0.7.0",
"getopts",
"lazy_static 1.3.0",
"libc",
@ -3337,17 +3337,6 @@ dependencies = [
"core",
]
[[package]]
name = "rustc_ast_borrowck"
version = "0.0.0"
dependencies = [
"graphviz",
"log",
"rustc",
"rustc_data_structures",
"syntax_pos",
]
[[package]]
name = "rustc_codegen_llvm"
version = "0.0.0"
@ -3420,12 +3409,11 @@ dependencies = [
name = "rustc_driver"
version = "0.0.0"
dependencies = [
"env_logger 0.6.2",
"env_logger 0.7.0",
"graphviz",
"lazy_static 1.3.0",
"log",
"rustc",
"rustc_ast_borrowck",
"rustc_codegen_utils",
"rustc_data_structures",
"rustc_errors",
@ -3483,7 +3471,6 @@ dependencies = [
"once_cell",
"rustc",
"rustc-rayon",
"rustc_ast_borrowck",
"rustc_codegen_ssa",
"rustc_codegen_utils",
"rustc_data_structures",

View file

@ -47,8 +47,6 @@ Stabilized APIs
- [`<*mut T>::cast`]
- [`Duration::as_secs_f32`]
- [`Duration::as_secs_f64`]
- [`Duration::div_duration_f32`]
- [`Duration::div_duration_f64`]
- [`Duration::div_f32`]
- [`Duration::div_f64`]
- [`Duration::from_secs_f32`]
@ -100,8 +98,6 @@ Compatibility Notes
[`<*mut T>::cast`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.cast
[`Duration::as_secs_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.as_secs_f32
[`Duration::as_secs_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.as_secs_f64
[`Duration::div_duration_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_duration_f32
[`Duration::div_duration_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_duration_f64
[`Duration::div_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_f32
[`Duration::div_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_f64
[`Duration::from_secs_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.from_secs_f32

View file

@ -580,7 +580,6 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// # Examples
///
/// ```
/// #![feature(map_get_key_value)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@ -588,7 +587,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[unstable(feature = "map_get_key_value", issue = "49347")]
#[stable(feature = "map_get_key_value", since = "1.40.0")]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where K: Borrow<Q>,
Q: Ord

View file

@ -369,6 +369,8 @@
//! [drop-guarantee]: #drop-guarantee
//! [`poll`]: ../../std/future/trait.Future.html#tymethod.poll
//! [`Pin::get_unchecked_mut`]: struct.Pin.html#method.get_unchecked_mut
//! [`bool`]: ../../std/primitive.bool.html
//! [`i32`]: ../../std/primitive.i32.html
#![stable(feature = "pin", since = "1.33.0")]

View file

@ -86,7 +86,6 @@ macro_rules! arena_types {
rustc::infer::canonical::QueryResponse<'tcx, rustc::ty::Ty<'tcx>>
>,
[few] crate_inherent_impls: rustc::ty::CrateInherentImpls,
[decode] borrowck: rustc::middle::borrowck::BorrowCheckResult,
[few] upstream_monomorphizations:
rustc::util::nodemap::DefIdMap<
rustc_data_structures::fx::FxHashMap<

View file

@ -1,87 +0,0 @@
use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
use std::hash::Hash;
use std::marker::PhantomData;
use crate::util::common::MemoizationMap;
use super::{DepKind, DepNodeIndex, DepGraph};
/// A DepTrackingMap offers a subset of the `Map` API and ensures that
/// we make calls to `read` and `write` as appropriate. We key the
/// maps with a unique type for brevity.
pub struct DepTrackingMap<M: DepTrackingMapConfig> {
phantom: PhantomData<M>,
graph: DepGraph,
map: FxHashMap<M::Key, (M::Value, DepNodeIndex)>,
}
pub trait DepTrackingMapConfig {
type Key: Eq + Hash + Clone;
type Value: Clone;
fn to_dep_kind() -> DepKind;
}
impl<M: DepTrackingMapConfig> DepTrackingMap<M> {
pub fn new(graph: DepGraph) -> DepTrackingMap<M> {
DepTrackingMap {
phantom: PhantomData,
graph,
map: Default::default(),
}
}
}
impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
type Key = M::Key;
type Value = M::Value;
/// Memoizes an entry in the dep-tracking-map. If the entry is not
/// already present, then `op` will be executed to compute its value.
/// The resulting dependency graph looks like this:
///
/// [op] -> Map(key) -> CurrentTask
///
/// Here, `[op]` represents whatever nodes `op` reads in the
/// course of execution; `Map(key)` represents the node for this
/// map, and `CurrentTask` represents the current task when
/// `memoize` is invoked.
///
/// **Important:** when `op` is invoked, the current task will be
/// switched to `Map(key)`. Therefore, if `op` makes use of any
/// HIR nodes or shared state accessed through its closure
/// environment, it must explicitly register a read of that
/// state. As an example, see `type_of_item` in `collect`,
/// which looks something like this:
///
/// ```
/// fn type_of_item(..., item: &hir::Item) -> Ty<'tcx> {
/// let item_def_id = ccx.tcx.hir().local_def_id(it.hir_id);
/// ccx.tcx.item_types.memoized(item_def_id, || {
/// ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id)); // (*)
/// compute_type_of_item(ccx, item)
/// });
/// }
/// ```
///
/// The key is the line marked `(*)`: the closure implicitly
/// accesses the body of the item `item`, so we register a read
/// from `Hir(item_def_id)`.
fn memoize<OP>(&self, key: M::Key, op: OP) -> M::Value
where OP: FnOnce() -> M::Value
{
let graph;
{
let this = self.borrow();
if let Some(&(ref result, dep_node)) = this.map.get(&key) {
this.graph.read_index(dep_node);
return result.clone();
}
graph = this.graph.clone();
}
let (result, dep_node) = graph.with_anon_task(M::to_dep_kind(), op);
self.borrow_mut().map.insert(key, (result.clone(), dep_node));
graph.read_index(dep_node);
result
}
}

View file

@ -590,7 +590,7 @@ impl DepGraph {
// mark it as green by recursively marking all of its
// dependencies green.
self.try_mark_previous_green(
tcx.global_tcx(),
tcx,
data,
prev_index,
&dep_node

View file

@ -1,6 +1,5 @@
pub mod debug;
mod dep_node;
mod dep_tracking_map;
mod graph;
mod prev;
mod query;
@ -8,7 +7,6 @@ mod safe;
mod serialized;
pub mod cgu_reuse_tracker;
pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, RecoverKey, label_strs};
pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, TaskDeps, hash_result};
pub use self::graph::WorkProductFileKind;

View file

@ -93,30 +93,35 @@ struct CheckAttrVisitor<'tcx> {
impl CheckAttrVisitor<'tcx> {
/// Checks any attribute.
fn check_attributes(&self, item: &hir::Item, target: Target) {
if target == Target::Fn || target == Target::Const {
self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.hir_id));
} else if let Some(a) = item.attrs.iter().find(|a| a.check_name(sym::target_feature)) {
self.tcx.sess.struct_span_err(a.span, "attribute should be applied to a function")
.span_label(item.span, "not a function")
.emit();
}
let mut is_valid = true;
for attr in &item.attrs {
if attr.check_name(sym::inline) {
is_valid &= if attr.check_name(sym::inline) {
self.check_inline(attr, &item.span, target)
} else if attr.check_name(sym::non_exhaustive) {
self.check_non_exhaustive(attr, item, target)
} else if attr.check_name(sym::marker) {
self.check_marker(attr, item, target)
}
} else if attr.check_name(sym::target_feature) {
self.check_target_feature(attr, item, target)
} else {
true
};
}
if !is_valid {
return;
}
if target == Target::Fn {
self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.hir_id));
}
self.check_repr(item, target);
self.check_used(item, target);
}
/// Checks if an `#[inline]` is applied to a function or a closure.
fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) {
/// Checks if an `#[inline]` is applied to a function or a closure. Returns `true` if valid.
fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) -> bool {
if target != Target::Fn && target != Target::Closure {
struct_span_err!(self.tcx.sess,
attr.span,
@ -124,13 +129,21 @@ impl CheckAttrVisitor<'tcx> {
"attribute should be applied to function or closure")
.span_label(*span, "not a function or closure")
.emit();
false
} else {
true
}
}
/// Checks if the `#[non_exhaustive]` attribute on an `item` is valid.
fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
/// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. Returns `true` if valid.
fn check_non_exhaustive(
&self,
attr: &hir::Attribute,
item: &hir::Item,
target: Target,
) -> bool {
match target {
Target::Struct | Target::Enum => { /* Valid */ },
Target::Struct | Target::Enum => true,
_ => {
struct_span_err!(self.tcx.sess,
attr.span,
@ -138,25 +151,44 @@ impl CheckAttrVisitor<'tcx> {
"attribute can only be applied to a struct or enum")
.span_label(item.span, "not a struct or enum")
.emit();
return;
false
}
}
}
/// Checks if the `#[marker]` attribute on an `item` is valid.
fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
/// Checks if the `#[marker]` attribute on an `item` is valid. Returns `true` if valid.
fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) -> bool {
match target {
Target::Trait => { /* Valid */ },
Target::Trait => true,
_ => {
self.tcx.sess
.struct_span_err(attr.span, "attribute can only be applied to a trait")
.span_label(item.span, "not a trait")
.emit();
return;
false
}
}
}
/// Checks if the `#[target_feature]` attribute on `item` is valid. Returns `true` if valid.
fn check_target_feature(
&self,
attr: &hir::Attribute,
item: &hir::Item,
target: Target,
) -> bool {
match target {
Target::Fn => true,
_ => {
self.tcx.sess
.struct_span_err(attr.span, "attribute should be applied to a function")
.span_label(item.span, "not a function")
.emit();
false
},
}
}
/// Checks if the `#[repr]` attributes on `item` are valid.
fn check_repr(&self, item: &hir::Item, target: Target) {
// Extract the names of all repr hints, e.g., [foo, bar, align] for:

View file

@ -705,7 +705,6 @@ impl LoweringContext<'_> {
E0628,
"generators cannot have explicit parameters"
);
self.sess.abort_if_errors();
}
Some(match movability {
Movability::Movable => hir::GeneratorMovability::Movable,
@ -998,7 +997,7 @@ impl LoweringContext<'_> {
E0727,
"`async` generators are not yet supported",
);
self.sess.abort_if_errors();
return hir::ExprKind::Err;
},
None => self.generator_kind = Some(hir::GeneratorKind::Gen),
}

View file

@ -23,8 +23,6 @@ use syntax::source_map::Spanned;
use syntax::ext::base::MacroKind;
use syntax_pos::{Span, DUMMY_SP};
use std::result::Result::Err;
pub mod blocks;
mod collector;
mod def_collector;
@ -183,6 +181,44 @@ pub struct Map<'hir> {
hir_to_node_id: FxHashMap<HirId, NodeId>,
}
struct ParentHirIterator<'map> {
current_id: HirId,
map: &'map Map<'map>,
}
impl<'map> ParentHirIterator<'map> {
fn new(current_id: HirId, map: &'map Map<'map>) -> ParentHirIterator<'map> {
ParentHirIterator {
current_id,
map,
}
}
}
impl<'map> Iterator for ParentHirIterator<'map> {
type Item = (HirId, Node<'map>);
fn next(&mut self) -> Option<Self::Item> {
if self.current_id == CRATE_HIR_ID {
return None;
}
loop { // There are nodes that do not have entries, so we need to skip them.
let parent_id = self.map.get_parent_node(self.current_id);
if parent_id == self.current_id {
self.current_id = CRATE_HIR_ID;
return None;
}
self.current_id = parent_id;
if let Some(entry) = self.map.find_entry(parent_id) {
return Some((parent_id, entry.node));
}
// If this `HirId` doesn't have an `Entry`, skip it and look for its `parent_id`.
}
}
}
impl<'hir> Map<'hir> {
#[inline]
fn lookup(&self, id: HirId) -> Option<&Entry<'hir>> {
@ -682,45 +718,6 @@ impl<'hir> Map<'hir> {
}
}
/// If there is some error when walking the parents (e.g., a node does not
/// have a parent in the map or a node can't be found), then we return the
/// last good `HirId` we found. Note that reaching the crate root (`id == 0`),
/// is not an error, since items in the crate module have the crate root as
/// parent.
fn walk_parent_nodes<F, F2>(&self,
start_id: HirId,
found: F,
bail_early: F2)
-> Result<HirId, HirId>
where F: Fn(&Node<'hir>) -> bool, F2: Fn(&Node<'hir>) -> bool
{
let mut id = start_id;
loop {
let parent_id = self.get_parent_node(id);
if parent_id == CRATE_HIR_ID {
return Ok(CRATE_HIR_ID);
}
if parent_id == id {
return Err(id);
}
if let Some(entry) = self.find_entry(parent_id) {
if let Node::Crate = entry.node {
return Err(id);
}
if found(&entry.node) {
return Ok(parent_id);
} else if bail_early(&entry.node) {
return Err(parent_id);
}
id = parent_id;
} else {
return Err(id);
}
}
}
/// Retrieves the `HirId` for `id`'s enclosing method, unless there's a
/// `while` or `loop` before reaching it, as block tail returns are not
/// available in them.
@ -744,29 +741,46 @@ impl<'hir> Map<'hir> {
/// }
/// ```
pub fn get_return_block(&self, id: HirId) -> Option<HirId> {
let match_fn = |node: &Node<'_>| {
match *node {
let mut iter = ParentHirIterator::new(id, &self).peekable();
let mut ignore_tail = false;
if let Some(entry) = self.find_entry(id) {
if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. }) = entry.node {
// When dealing with `return` statements, we don't care about climbing only tail
// expressions.
ignore_tail = true;
}
}
while let Some((hir_id, node)) = iter.next() {
if let (Some((_, next_node)), false) = (iter.peek(), ignore_tail) {
match next_node {
Node::Block(Block { expr: None, .. }) => return None,
Node::Block(Block { expr: Some(expr), .. }) => {
if hir_id != expr.hir_id {
// The current node is not the tail expression of its parent.
return None;
}
}
_ => {}
}
}
match node {
Node::Item(_) |
Node::ForeignItem(_) |
Node::TraitItem(_) |
Node::Expr(Expr { kind: ExprKind::Closure(..), ..}) |
Node::ImplItem(_) => true,
_ => false,
}
};
let match_non_returning_block = |node: &Node<'_>| {
match *node {
Node::ImplItem(_) => return Some(hir_id),
Node::Expr(ref expr) => {
match expr.kind {
ExprKind::Loop(..) | ExprKind::Ret(..) => true,
_ => false,
// Ignore `return`s on the first iteration
ExprKind::Loop(..) | ExprKind::Ret(..) => return None,
_ => {}
}
}
_ => false,
Node::Local(_) => return None,
_ => {}
}
};
self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok()
}
None
}
/// Retrieves the `HirId` for `id`'s parent item, or `id` itself if no
@ -774,16 +788,17 @@ impl<'hir> Map<'hir> {
/// in the HIR which is recorded by the map and is an item, either an item
/// in a module, trait, or impl.
pub fn get_parent_item(&self, hir_id: HirId) -> HirId {
match self.walk_parent_nodes(hir_id, |node| match *node {
Node::Item(_) |
Node::ForeignItem(_) |
Node::TraitItem(_) |
Node::ImplItem(_) => true,
_ => false,
}, |_| false) {
Ok(id) => id,
Err(id) => id,
for (hir_id, node) in ParentHirIterator::new(hir_id, &self) {
match node {
Node::Crate |
Node::Item(_) |
Node::ForeignItem(_) |
Node::TraitItem(_) |
Node::ImplItem(_) => return hir_id,
_ => {}
}
}
hir_id
}
/// Returns the `DefId` of `id`'s nearest module parent, or `id` itself if no
@ -795,60 +810,64 @@ impl<'hir> Map<'hir> {
/// Returns the `HirId` of `id`'s nearest module parent, or `id` itself if no
/// module parent is in this map.
pub fn get_module_parent_node(&self, hir_id: HirId) -> HirId {
match self.walk_parent_nodes(hir_id, |node| match *node {
Node::Item(&Item { kind: ItemKind::Mod(_), .. }) => true,
_ => false,
}, |_| false) {
Ok(id) => id,
Err(id) => id,
for (hir_id, node) in ParentHirIterator::new(hir_id, &self) {
if let Node::Item(&Item { kind: ItemKind::Mod(_), .. }) = node {
return hir_id;
}
}
CRATE_HIR_ID
}
/// Returns the nearest enclosing scope. A scope is roughly an item or block.
pub fn get_enclosing_scope(&self, hir_id: HirId) -> Option<HirId> {
self.walk_parent_nodes(hir_id, |node| match *node {
Node::Item(i) => {
match i.kind {
ItemKind::Fn(..)
| ItemKind::Mod(..)
| ItemKind::Enum(..)
| ItemKind::Struct(..)
| ItemKind::Union(..)
| ItemKind::Trait(..)
| ItemKind::Impl(..) => true,
_ => false,
}
},
Node::ForeignItem(fi) => {
match fi.kind {
ForeignItemKind::Fn(..) => true,
_ => false,
}
},
Node::TraitItem(ti) => {
match ti.kind {
TraitItemKind::Method(..) => true,
_ => false,
}
},
Node::ImplItem(ii) => {
match ii.kind {
ImplItemKind::Method(..) => true,
_ => false,
}
},
Node::Block(_) => true,
_ => false,
}, |_| false).ok()
for (hir_id, node) in ParentHirIterator::new(hir_id, &self) {
if match node {
Node::Item(i) => {
match i.kind {
ItemKind::Fn(..)
| ItemKind::Mod(..)
| ItemKind::Enum(..)
| ItemKind::Struct(..)
| ItemKind::Union(..)
| ItemKind::Trait(..)
| ItemKind::Impl(..) => true,
_ => false,
}
},
Node::ForeignItem(fi) => {
match fi.kind {
ForeignItemKind::Fn(..) => true,
_ => false,
}
},
Node::TraitItem(ti) => {
match ti.kind {
TraitItemKind::Method(..) => true,
_ => false,
}
},
Node::ImplItem(ii) => {
match ii.kind {
ImplItemKind::Method(..) => true,
_ => false,
}
},
Node::Block(_) => true,
_ => false,
} {
return Some(hir_id);
}
}
None
}
/// Returns the defining scope for an opaque type definition.
pub fn get_defining_scope(&self, id: HirId) -> Option<HirId> {
pub fn get_defining_scope(&self, id: HirId) -> HirId {
let mut scope = id;
loop {
scope = self.get_enclosing_scope(scope)?;
scope = self.get_enclosing_scope(scope).unwrap_or(CRATE_HIR_ID);
if scope == CRATE_HIR_ID {
return Some(CRATE_HIR_ID);
return CRATE_HIR_ID;
}
match self.get(scope) {
Node::Item(i) => {
@ -861,7 +880,7 @@ impl<'hir> Map<'hir> {
_ => break,
}
}
Some(scope)
scope
}
pub fn get_parent_did(&self, id: HirId) -> DefId {
@ -1064,6 +1083,14 @@ impl<'hir> Map<'hir> {
self.as_local_hir_id(id).map(|id| self.span(id))
}
pub fn res_span(&self, res: Res) -> Option<Span> {
match res {
Res::Err => None,
Res::Local(id) => Some(self.span(id)),
res => self.span_if_local(res.opt_def_id()?),
}
}
pub fn node_to_string(&self, id: HirId) -> String {
hir_id_to_string(self, id, true)
}

View file

@ -1291,17 +1291,6 @@ pub struct Arm {
pub body: P<Expr>,
}
impl Arm {
// HACK(or_patterns; Centril | dlrobertson): Remove this and
// correctly handle each case in which this method is used.
pub fn top_pats_hack(&self) -> &[P<Pat>] {
match &self.pat.kind {
PatKind::Or(pats) => pats,
_ => std::slice::from_ref(&self.pat),
}
}
}
#[derive(RustcEncodable, RustcDecodable, Debug, HashStable)]
pub enum Guard {
If(P<Expr>),
@ -1563,7 +1552,7 @@ pub enum ExprKind {
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`.
MethodCall(P<PathSegment>, Span, HirVec<Expr>),
/// A tuple (e.g., `(a, b, c ,d)`).
/// A tuple (e.g., `(a, b, c, d)`).
Tup(HirVec<Expr>),
/// A binary operation (e.g., `a + b`, `a * b`).
Binary(BinOp, P<Expr>, P<Expr>),

View file

@ -93,6 +93,8 @@ impl SuppressRegionErrors {
/// checks, so we should ignore errors if NLL is (unconditionally)
/// enabled.
pub fn when_nll_is_enabled(tcx: TyCtxt<'_>) -> Self {
// FIXME(Centril): Once we actually remove `::Migrate` also make
// this always `true` and then proceed to eliminate the dead code.
match tcx.borrowck_mode() {
// If we're on Migrate mode, report AST region errors
BorrowckMode::Migrate => SuppressRegionErrors { suppressed: false },
@ -1460,7 +1462,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
// type-checking closure types are in local tables only.
if !self.in_progress_tables.is_some() || !ty.has_closure_types() {
if !(param_env, ty).has_local_value() {
return ty.is_copy_modulo_regions(self.tcx.global_tcx(), param_env, span);
return ty.is_copy_modulo_regions(self.tcx, param_env, span);
}
}

View file

@ -561,15 +561,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
def_id, instantiated_ty
);
let gcx = self.tcx.global_tcx();
// Use substs to build up a reverse map from regions to their
// identity mappings. This is necessary because of `impl
// Trait` lifetimes are computed by replacing existing
// lifetimes with 'static and remapping only those used in the
// `impl Trait` return type, resulting in the parameters
// shifting.
let id_substs = InternalSubsts::identity_for_item(gcx, def_id);
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id);
let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> = opaque_defn
.substs
.iter()
@ -854,7 +852,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> {
)
.emit();
self.tcx().global_tcx().mk_region(ty::ReStatic)
self.tcx().mk_region(ty::ReStatic)
},
}
}
@ -1215,7 +1213,7 @@ pub fn may_define_opaque_type(
let mut hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
// Named opaque types can be defined by any siblings or children of siblings.
let scope = tcx.hir().get_defining_scope(opaque_hir_id).expect("could not get defining scope");
let scope = tcx.hir().get_defining_scope(opaque_hir_id);
// We walk up the node tree until we hit the root or the scope of the opaque type.
while hir_id != scope && hir_id != hir::CRATE_HIR_ID {
hir_id = tcx.hir().get_parent_item(hir_id);

View file

@ -100,7 +100,6 @@ pub mod infer;
pub mod lint;
pub mod middle {
pub mod borrowck;
pub mod expr_use_visitor;
pub mod cstore;
pub mod dead;

View file

@ -1,31 +0,0 @@
use crate::ich::StableHashingContext;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
StableHasherResult};
#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum SignalledError { SawSomeError, NoErrorsSeen }
impl Default for SignalledError {
fn default() -> SignalledError {
SignalledError::NoErrorsSeen
}
}
impl_stable_hash_for!(enum self::SignalledError { SawSomeError, NoErrorsSeen });
#[derive(Debug, Default, RustcEncodable, RustcDecodable)]
pub struct BorrowCheckResult {
pub signalled_any_error: SignalledError,
}
impl<'a> HashStable<StableHashingContext<'a>> for BorrowCheckResult {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
let BorrowCheckResult {
ref signalled_any_error,
} = *self;
signalled_any_error.hash_stable(hcx, hasher);
}
}

View file

@ -82,7 +82,7 @@ impl ExprVisitor<'tcx> {
// Special-case transmutting from `typeof(function)` and
// `Option<typeof(function)>` to present a clearer error.
let from = unpack_option_like(self.tcx.global_tcx(), from);
let from = unpack_option_like(self.tcx, from);
if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.kind, sk_to) {
if size_to == Pointer.size(&self.tcx) {
struct_span_err!(self.tcx.sess, span, E0591,

View file

@ -749,7 +749,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
.unwrap_or(ty::ClosureKind::LATTICE_BOTTOM),
None =>
closure_substs.closure_kind(closure_def_id, self.tcx.global_tcx()),
closure_substs.closure_kind(closure_def_id, self.tcx),
}
}
_ => span_bug!(span, "unexpected type for fn in mem_categorization: {:?}", ty),

View file

@ -1504,7 +1504,7 @@ impl<'tcx> TerminatorKind<'tcx> {
Goto { .. } => vec!["".into()],
SwitchInt { ref values, switch_ty, .. } => ty::tls::with(|tcx| {
let param_env = ty::ParamEnv::empty();
let switch_ty = tcx.lift_to_global(&switch_ty).unwrap();
let switch_ty = tcx.lift(&switch_ty).unwrap();
let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size;
values
.iter()

View file

@ -397,10 +397,6 @@ rustc_queries! {
}
BorrowChecking {
query borrowck(key: DefId) -> &'tcx BorrowCheckResult {
cache_on_disk_if { key.is_local() }
}
/// Borrow-checks the function body. If this is a closure, returns
/// additional requirements that the closure's creator must verify.
query mir_borrowck(key: DefId) -> mir::BorrowCheckResult<'tcx> {
@ -469,7 +465,7 @@ rustc_queries! {
}
TypeChecking {
query check_match(key: DefId) -> SignalledError {
query check_match(key: DefId) {
cache_on_disk_if { key.is_local() }
}

View file

@ -478,14 +478,6 @@ impl BorrowckMode {
BorrowckMode::Migrate => true,
}
}
/// Returns whether we should emit the AST-based borrow checker errors.
pub fn use_ast(self) -> bool {
match self {
BorrowckMode::Mir => false,
BorrowckMode::Migrate => false,
}
}
}
pub enum Input {
@ -1268,14 +1260,6 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
save_analysis: bool = (false, parse_bool, [UNTRACKED],
"write syntax and type analysis (in JSON format) information, in \
addition to normal output"),
flowgraph_print_loans: bool = (false, parse_bool, [UNTRACKED],
"include loan analysis data in -Z unpretty flowgraph output"),
flowgraph_print_moves: bool = (false, parse_bool, [UNTRACKED],
"include move analysis data in -Z unpretty flowgraph output"),
flowgraph_print_assigns: bool = (false, parse_bool, [UNTRACKED],
"include assignment analysis data in -Z unpretty flowgraph output"),
flowgraph_print_all: bool = (false, parse_bool, [UNTRACKED],
"include all dataflow analysis data in -Z unpretty flowgraph output"),
print_region_graph: bool = (false, parse_bool, [UNTRACKED],
"prints region inference graph. \
Use with RUST_REGION_GRAPH=help for more info"),
@ -1424,8 +1408,6 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
valid types are any of the types for `--pretty`, as well as:
`expanded`, `expanded,identified`,
`expanded,hygiene` (with internal representations),
`flowgraph=<nodeid>` (graphviz formatted flowgraph for node),
`flowgraph,unlabelled=<nodeid>` (unlabelled graphviz formatted flowgraph for node),
`everybody_loops` (all function bodies replaced with `loop {}`),
`hir` (the HIR), `hir,identified`,
`hir,typed` (HIR with types for each node),

View file

@ -589,14 +589,6 @@ fn test_debugging_options_tracking_hash() {
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.save_analysis = true;
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.flowgraph_print_loans = true;
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.flowgraph_print_moves = true;
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.flowgraph_print_assigns = true;
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.flowgraph_print_all = true;
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.print_region_graph = true;
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts.debugging_opts.parse_only = true;

View file

@ -108,7 +108,7 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> {
goal: obligation.goal.predicate,
}, &mut orig_values);
match infcx.tcx.global_tcx().evaluate_goal(canonical_goal) {
match infcx.tcx.evaluate_goal(canonical_goal) {
Ok(response) => {
if response.is_proven() {
making_progress = true;

View file

@ -3,12 +3,10 @@
// seems likely that they should eventually be merged into more
// general routines.
use crate::dep_graph::{DepKind, DepTrackingMapConfig};
use std::marker::PhantomData;
use crate::infer::InferCtxt;
use crate::traits::{FulfillmentContext, Obligation, ObligationCause, SelectionContext,
TraitEngine, Vtable};
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::{self, TyCtxt};
use crate::ty::subst::{Subst, SubstsRef};
use crate::ty::fold::TypeFoldable;
@ -100,33 +98,8 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
// Implement DepTrackingMapConfig for `trait_cache`
pub struct TraitSelectionCache<'tcx> {
data: PhantomData<&'tcx ()>
}
impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> {
type Key = (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>);
type Value = Vtable<'tcx, ()>;
fn to_dep_kind() -> DepKind {
DepKind::TraitSelect
}
}
// # Global Cache
pub struct ProjectionCache<'tcx> {
data: PhantomData<&'tcx ()>,
}
impl<'tcx> DepTrackingMapConfig for ProjectionCache<'tcx> {
type Key = Ty<'tcx>;
type Value = Ty<'tcx>;
fn to_dep_kind() -> DepKind {
DepKind::TraitSelect
}
}
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// Finishes processes any obligations that remain in the
/// fulfillment context, and then returns the result with all type

View file

@ -497,7 +497,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
4
};
let normalize = |candidate| self.tcx.global_tcx().infer_ctxt().enter(|ref infcx| {
let normalize = |candidate| self.tcx.infer_ctxt().enter(|ref infcx| {
let normalized = infcx
.at(&ObligationCause::dummy(), ty::ParamEnv::empty())
.normalize(candidate)
@ -783,8 +783,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
ty::Predicate::ObjectSafe(trait_def_id) => {
let violations = self.tcx.global_tcx()
.object_safety_violations(trait_def_id);
let violations = self.tcx.object_safety_violations(trait_def_id);
if let Some(err) = self.tcx.report_object_safety_error(
span,
trait_def_id,
@ -920,7 +919,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
TraitNotObjectSafe(did) => {
let violations = self.tcx.global_tcx().object_safety_violations(did);
let violations = self.tcx.object_safety_violations(did);
if let Some(err) = self.tcx.report_object_safety_error(span, did, violations) {
err
} else {

View file

@ -495,7 +495,7 @@ impl<'a, 'b, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'tcx> {
} else {
if !substs.has_local_value() {
let instance = ty::Instance::resolve(
self.selcx.tcx().global_tcx(),
self.selcx.tcx(),
obligation.param_env,
def_id,
substs,

View file

@ -40,12 +40,11 @@ impl<'cx, 'tcx> At<'cx, 'tcx> {
};
}
let gcx = tcx.global_tcx();
let mut orig_values = OriginalQueryValues::default();
let c_ty = self.infcx.canonicalize_query(&self.param_env.and(ty), &mut orig_values);
let span = self.cause.span;
debug!("c_ty = {:?}", c_ty);
if let Ok(result) = &gcx.dropck_outlives(c_ty) {
if let Ok(result) = &tcx.dropck_outlives(c_ty) {
if result.is_proven() {
if let Ok(InferOk { value, obligations }) =
self.infcx.instantiate_query_response_and_region_obligations(

View file

@ -50,7 +50,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
// Run canonical query. If overflow occurs, rerun from scratch but this time
// in standard trait query mode so that overflow is handled appropriately
// within `SelectionContext`.
self.tcx.global_tcx().evaluate_obligation(c_pred)
self.tcx.evaluate_obligation(c_pred)
}
// Helper function that canonicalizes and runs the query. If an

View file

@ -141,7 +141,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
// binder). It would be better to normalize in a
// binding-aware fashion.
let gcx = self.infcx.tcx.global_tcx();
let tcx = self.infcx.tcx;
let mut orig_values = OriginalQueryValues::default();
// HACK(matthewjasper) `'static` is special-cased in selection,
@ -150,7 +150,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
&self.param_env.and(*data), &mut orig_values);
debug!("QueryNormalizer: c_data = {:#?}", c_data);
debug!("QueryNormalizer: orig_values = {:#?}", orig_values);
match gcx.normalize_projection_ty(c_data) {
match tcx.normalize_projection_ty(c_data) {
Ok(result) => {
// We don't expect ambiguity.
if result.is_ambiguous() {

View file

@ -97,7 +97,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
let mut orig_values = OriginalQueryValues::default();
let key = self.canonicalize_query(&param_env.and(ty), &mut orig_values);
let result = match self.tcx.global_tcx().implied_outlives_bounds(key) {
let result = match self.tcx.implied_outlives_bounds(key) {
Ok(r) => r,
Err(NoSolution) => {
self.tcx.sess.delay_span_bug(

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::hir::def_id::DefId;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -37,12 +37,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_ascribe_user_type(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -34,12 +34,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_eq(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::outlives_bounds::OutlivesBound;
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -38,12 +38,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> {
tcx.implied_outlives_bounds(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,6 +1,6 @@
use crate::infer::canonical::{
Canonical, Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues,
QueryRegionConstraints, QueryResponse,
Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues,
QueryRegionConstraints,
};
use crate::infer::{InferCtxt, InferOk};
use std::fmt;
@ -66,22 +66,6 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + 'tcx {
canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>>;
/// Casts a lifted query result (which is in the gcx lifetime)
/// into the tcx lifetime. This is always just an identity cast,
/// but the generic code doesn't realize it -- put another way, in
/// the generic code, we have a `Lifted<'tcx, Self::QueryResponse>`
/// and we want to convert that to a `Self::QueryResponse`. This is
/// not a priori valid, so we can't do it -- but in practice, it
/// is always a no-op (e.g., the lifted form of a type,
/// `Ty<'tcx>`, is a subtype of `Ty<'tcx>`). So we have to push
/// the operation into the impls that know more specifically what
/// `QueryResponse` is. This operation would (maybe) be nicer with
/// something like HKTs or GATs, since then we could make
/// `QueryResponse` parametric and `'tcx` and `'tcx` etc.
fn shrink_to_tcx_lifetime(
lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>>;
fn fully_perform_into(
query_key: ParamEnvAnd<'tcx, Self>,
infcx: &InferCtxt<'_, 'tcx>,
@ -99,7 +83,6 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + 'tcx {
let canonical_self =
infcx.canonicalize_hr_query_hack(&query_key, &mut canonical_var_values);
let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?;
let canonical_result = Self::shrink_to_tcx_lifetime(&canonical_result);
let param_env = query_key.param_env;

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use std::fmt;
use crate::traits::query::Fallible;
use crate::ty::fold::TypeFoldable;
@ -38,12 +38,6 @@ where
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>> {
T::type_op_method(tcx, canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, T>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, T>> {
T::shrink_to_tcx_lifetime(v)
}
}
pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Copy {
@ -51,12 +45,6 @@ pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Cop
tcx: TyCtxt<'tcx>,
canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>>;
/// Converts from the `'tcx` (lifted) form of `Self` into the `tcx`
/// form of `Self`.
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>>;
}
impl Normalizable<'tcx> for Ty<'tcx> {
@ -66,12 +54,6 @@ impl Normalizable<'tcx> for Ty<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_ty(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
impl Normalizable<'tcx> for ty::Predicate<'tcx> {
@ -81,12 +63,6 @@ impl Normalizable<'tcx> for ty::Predicate<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> {
@ -96,12 +72,6 @@ impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_poly_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
impl Normalizable<'tcx> for ty::FnSig<'tcx> {
@ -111,12 +81,6 @@ impl Normalizable<'tcx> for ty::FnSig<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::dropck_outlives::trivial_dropck_outlives;
use crate::traits::query::dropck_outlives::DropckOutlivesResult;
use crate::traits::query::Fallible;
@ -53,12 +53,6 @@ impl super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> {
tcx.dropck_outlives(canonicalized)
}
fn shrink_to_tcx_lifetime(
lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
lifted_query_result
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Predicate, TyCtxt};
@ -43,12 +43,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_prove_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -34,12 +34,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_subtype(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -2491,7 +2491,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
if other.evaluation.must_apply_modulo_regions() {
match victim.candidate {
ImplCandidate(victim_def) => {
let tcx = self.tcx().global_tcx();
let tcx = self.tcx();
return tcx.specializes((other_def, victim_def))
|| tcx.impls_are_allowed_to_overlap(
other_def, victim_def).is_some();

View file

@ -162,7 +162,6 @@ impl<'tcx> Children {
}
};
let tcx = tcx.global_tcx();
let (le, ge) = traits::overlapping_impls(
tcx,
possible_sibling,

View file

@ -661,8 +661,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
None => {
self.global_tcx()
.impl_defaultness(node_item_def_id)
self.impl_defaultness(node_item_def_id)
.is_default()
}
}

View file

@ -1067,14 +1067,6 @@ pub struct GlobalCtxt<'tcx> {
}
impl<'tcx> TyCtxt<'tcx> {
/// Gets the global `TyCtxt`.
#[inline]
pub fn global_tcx(self) -> TyCtxt<'tcx> {
TyCtxt {
gcx: self.gcx,
}
}
#[inline(always)]
pub fn hir(self) -> &'tcx hir_map::Map<'tcx> {
&self.hir_map
@ -1156,11 +1148,6 @@ impl<'tcx> TyCtxt<'tcx> {
value.lift_to_tcx(self)
}
/// Like lift, but only tries in the global tcx.
pub fn lift_to_global<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
value.lift_to_tcx(self.global_tcx())
}
/// Creates a type context and call the closure with a `TyCtxt` reference
/// to the context. The closure enforces that the type context and any interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
@ -1432,13 +1419,7 @@ impl<'tcx> TyCtxt<'tcx> {
-> Result<(), E::Error>
where E: ty::codec::TyEncoder
{
self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
}
/// If `true`, we should use the AST-based borrowck (we may *also* use
/// the MIR-based borrowck).
pub fn use_ast_borrowck(self) -> bool {
self.borrowck_mode().use_ast()
self.queries.on_disk_cache.serialize(self, encoder)
}
/// If `true`, we should use the MIR-based borrowck, but also
@ -1606,7 +1587,7 @@ impl<'tcx> GlobalCtxt<'tcx> {
let tcx = TyCtxt {
gcx: self,
};
ty::tls::with_related_context(tcx.global_tcx(), |icx| {
ty::tls::with_related_context(tcx, |icx| {
let new_icx = ty::tls::ImplicitCtxt {
tcx,
query: icx.query.clone(),
@ -2431,7 +2412,7 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
self.mk_ty(Array(ty, ty::Const::from_usize(self.global_tcx(), n)))
self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
}
#[inline]
@ -2646,7 +2627,7 @@ impl<'tcx> TyCtxt<'tcx> {
if ts.len() == 0 {
List::empty()
} else {
self.global_tcx()._intern_canonical_var_infos(ts)
self._intern_canonical_var_infos(ts)
}
}

View file

@ -193,7 +193,7 @@ impl<'tcx> ty::TyS<'tcx> {
ty::Adt(def, _) => format!("{} `{}`", def.descr(), tcx.def_path_str(def.did)).into(),
ty::Foreign(def_id) => format!("extern type `{}`", tcx.def_path_str(def_id)).into(),
ty::Array(_, n) => {
let n = tcx.lift_to_global(&n).unwrap();
let n = tcx.lift(&n).unwrap();
match n.try_eval_usize(tcx, ty::ParamEnv::empty()) {
Some(n) => {
format!("array of {} element{}", n, pluralise!(n)).into()

View file

@ -210,7 +210,7 @@ impl<'tcx> Instance<'tcx> {
}
pub fn mono(tcx: TyCtxt<'tcx>, def_id: DefId) -> Instance<'tcx> {
Instance::new(def_id, tcx.global_tcx().empty_substs_for_def_id(def_id))
Instance::new(def_id, tcx.empty_substs_for_def_id(def_id))
}
#[inline]

View file

@ -1883,7 +1883,7 @@ impl<'tcx> HasDataLayout for TyCtxt<'tcx> {
impl<'tcx> HasTyCtxt<'tcx> for TyCtxt<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.global_tcx()
*self
}
}
@ -2003,7 +2003,7 @@ impl TyCtxt<'tcx> {
pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
let cx = LayoutCx {
tcx: self.global_tcx(),
tcx: self,
param_env: param_env_and_ty.param_env
};
cx.layout_of(param_env_and_ty.value)
@ -2017,7 +2017,7 @@ impl ty::query::TyCtxtAt<'tcx> {
pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
let cx = LayoutCx {
tcx: self.global_tcx().at(self.span),
tcx: self.at(self.span),
param_env: param_env_and_ty.param_env
};
cx.layout_of(param_env_and_ty.value)

View file

@ -2378,7 +2378,7 @@ impl<'tcx> AdtDef {
pub fn eval_explicit_discr(&self, tcx: TyCtxt<'tcx>, expr_did: DefId) -> Option<Discr<'tcx>> {
let param_env = tcx.param_env(expr_did);
let repr_type = self.repr.discr_type();
let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), expr_did);
let substs = InternalSubsts::identity_for_item(tcx, expr_did);
let instance = ty::Instance::new(expr_did, substs);
let cid = GlobalId {
instance,
@ -2387,7 +2387,7 @@ impl<'tcx> AdtDef {
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => {
// FIXME: Find the right type and use it instead of `val.ty` here
if let Some(b) = val.try_eval_bits(tcx.global_tcx(), param_env, val.ty) {
if let Some(b) = val.try_eval_bits(tcx, param_env, val.ty) {
trace!("discriminants: {} ({:?})", b, repr_type);
Some(Discr {
val: b,
@ -2423,7 +2423,7 @@ impl<'tcx> AdtDef {
tcx: TyCtxt<'tcx>,
) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
let repr_type = self.repr.discr_type();
let initial = repr_type.initial_discriminant(tcx.global_tcx());
let initial = repr_type.initial_discriminant(tcx);
let mut prev_discr = None::<Discr<'tcx>>;
self.variants.iter_enumerated().map(move |(i, v)| {
let mut discr = prev_discr.map_or(initial, |d| d.wrap_incr(tcx));
@ -2457,7 +2457,7 @@ impl<'tcx> AdtDef {
let (val, offset) = self.discriminant_def_for_variant(variant_index);
let explicit_value = val
.and_then(|expr_did| self.eval_explicit_discr(tcx, expr_did))
.unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx.global_tcx()));
.unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx));
explicit_value.checked_add(tcx, offset as u128).0
}

View file

@ -917,7 +917,7 @@ pub trait PrettyPrinter<'tcx>:
let min = 1u128 << (bit_size - 1);
let max = min - 1;
let ty = self.tcx().lift_to_global(&ct.ty).unwrap();
let ty = self.tcx().lift(&ct.ty).unwrap();
let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty))
.unwrap()
.size;

View file

@ -4,7 +4,6 @@ use crate::hir::def::{DefKind, Export};
use crate::hir::{self, TraitCandidate, ItemLocalId, CodegenFnAttrs};
use crate::infer::canonical::{self, Canonical};
use crate::lint;
use crate::middle::borrowck::{BorrowCheckResult, SignalledError};
use crate::middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary, ForeignModule};
use crate::middle::cstore::{NativeLibraryKind, DepKind, CrateSource};
use crate::middle::privacy::AccessLevels;

View file

@ -265,7 +265,7 @@ impl<'tcx> TyCtxt<'tcx> {
tls::with_related_context(self, move |current_icx| {
// Update the `ImplicitCtxt` to point to our new query job.
let new_icx = tls::ImplicitCtxt {
tcx: self.global_tcx(),
tcx: self,
query: Some(job),
diagnostics,
layout_depth: current_icx.layout_depth,
@ -274,7 +274,7 @@ impl<'tcx> TyCtxt<'tcx> {
// Use the `ImplicitCtxt` while we execute the query.
tls::enter_context(&new_icx, |_| {
compute(self.global_tcx())
compute(self)
})
})
}
@ -384,7 +384,7 @@ impl<'tcx> TyCtxt<'tcx> {
let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
self.start_query(job.job.clone(), diagnostics, |tcx| {
tcx.dep_graph.with_anon_task(Q::dep_kind(), || {
Q::compute(tcx.global_tcx(), key)
Q::compute(tcx, key)
})
})
});
@ -445,10 +445,10 @@ impl<'tcx> TyCtxt<'tcx> {
debug_assert!(self.dep_graph.is_green(dep_node));
// First we try to load the result from the on-disk cache.
let result = if Q::cache_on_disk(self.global_tcx(), key.clone(), None) &&
let result = if Q::cache_on_disk(self, key.clone(), None) &&
self.sess.opts.debugging_opts.incremental_queries {
self.sess.profiler(|p| p.incremental_load_result_start(Q::NAME));
let result = Q::try_load_from_disk(self.global_tcx(), prev_dep_node_index);
let result = Q::try_load_from_disk(self, prev_dep_node_index);
self.sess.profiler(|p| p.incremental_load_result_end(Q::NAME));
// We always expect to find a cached result for things that
@ -643,7 +643,7 @@ impl<'tcx> TyCtxt<'tcx> {
macro_rules! handle_cycle_error {
([][$tcx: expr, $error:expr]) => {{
$tcx.report_cycle($error).emit();
Value::from_cycle_error($tcx.global_tcx())
Value::from_cycle_error($tcx)
}};
([fatal_cycle$(, $modifiers:ident)*][$tcx:expr, $error:expr]) => {{
$tcx.report_cycle($error).emit();
@ -652,7 +652,7 @@ macro_rules! handle_cycle_error {
}};
([cycle_delay_bug$(, $modifiers:ident)*][$tcx:expr, $error:expr]) => {{
$tcx.report_cycle($error).delay_as_bug();
Value::from_cycle_error($tcx.global_tcx())
Value::from_cycle_error($tcx)
}};
([$other:ident$(, $modifiers:ident)*][$($args:tt)*]) => {
handle_cycle_error!([$($modifiers),*][$($args)*])
@ -999,7 +999,7 @@ macro_rules! define_queries_inner {
// would be missing appropriate entries in `providers`.
.unwrap_or(&tcx.queries.fallback_extern_providers)
.$name;
provider(tcx.global_tcx(), key)
provider(tcx, key)
})
}

View file

@ -1,10 +1,9 @@
#![allow(non_camel_case_types)]
use rustc_data_structures::{fx::FxHashMap, sync::Lock};
use rustc_data_structures::sync::Lock;
use std::cell::{RefCell, Cell};
use std::cell::Cell;
use std::fmt::Debug;
use std::hash::Hash;
use std::time::{Duration, Instant};
use std::sync::mpsc::{Sender};
@ -279,39 +278,3 @@ pub fn indenter() -> Indenter {
debug!(">>");
Indenter { _cannot_construct_outside_of_this_module: () }
}
pub trait MemoizationMap {
type Key: Clone;
type Value: Clone;
/// If `key` is present in the map, return the value,
/// otherwise invoke `op` and store the value in the map.
///
/// N.B., if the receiver is a `DepTrackingMap`, special care is
/// needed in the `op` to ensure that the correct edges are
/// added into the dep graph. See the `DepTrackingMap` impl for
/// more details!
fn memoize<OP>(&self, key: Self::Key, op: OP) -> Self::Value
where OP: FnOnce() -> Self::Value;
}
impl<K, V> MemoizationMap for RefCell<FxHashMap<K,V>>
where K: Hash+Eq+Clone, V: Clone
{
type Key = K;
type Value = V;
fn memoize<OP>(&self, key: K, op: OP) -> V
where OP: FnOnce() -> V
{
let result = self.borrow().get(&key).cloned();
match result {
Some(result) => result,
None => {
let result = op();
self.borrow_mut().insert(key, result.clone());
result
}
}
}
}

View file

@ -1,20 +0,0 @@
[package]
authors = ["The Rust Project Developers"]
name = "rustc_ast_borrowck"
version = "0.0.0"
edition = "2018"
[lib]
name = "rustc_ast_borrowck"
path = "lib.rs"
test = false
doctest = false
[dependencies]
log = "0.4"
syntax_pos = { path = "../libsyntax_pos" }
# for "clarity", rename the graphviz crate to dot; graphviz within `borrowck`
# refers to the borrowck-specific graphviz adapter traits.
dot = { path = "../libgraphviz", package = "graphviz" }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }

File diff suppressed because it is too large Load diff

View file

@ -1,680 +0,0 @@
// ----------------------------------------------------------------------
// Checking loans
//
// Phase 2 of check: we walk down the tree and check that:
// 1. assignments are always made to mutable locations;
// 2. loans made in overlapping scopes do not conflict
// 3. assignments do not affect things loaned out as immutable
// 4. moves do not affect things loaned out in any way
use crate::borrowck::*;
use crate::borrowck::InteriorKind::{InteriorElement, InteriorField};
use rustc::middle::expr_use_visitor as euv;
use rustc::middle::expr_use_visitor::MutateMode;
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::middle::region;
use rustc::ty::{self, TyCtxt, RegionKind};
use syntax_pos::Span;
use rustc::hir;
use rustc::hir::Node;
use log::debug;
use std::rc::Rc;
// FIXME (#16118): These functions are intended to allow the borrow checker to
// be less precise in its handling of Box while still allowing moves out of a
// Box. They should be removed when Unique is removed from LoanPath.
fn owned_ptr_base_path<'a, 'tcx>(loan_path: &'a LoanPath<'tcx>) -> &'a LoanPath<'tcx> {
//! Returns the base of the leftmost dereference of an Unique in
//! `loan_path`. If there is no dereference of an Unique in `loan_path`,
//! then it just returns `loan_path` itself.
return match helper(loan_path) {
Some(new_loan_path) => new_loan_path,
None => loan_path,
};
fn helper<'a, 'tcx>(loan_path: &'a LoanPath<'tcx>) -> Option<&'a LoanPath<'tcx>> {
match loan_path.kind {
LpVar(_) | LpUpvar(_) => None,
LpExtend(ref lp_base, _, LpDeref(mc::Unique)) => {
match helper(&lp_base) {
v @ Some(_) => v,
None => Some(&lp_base)
}
}
LpDowncast(ref lp_base, _) |
LpExtend(ref lp_base, ..) => helper(&lp_base)
}
}
}
fn owned_ptr_base_path_rc<'tcx>(loan_path: &Rc<LoanPath<'tcx>>) -> Rc<LoanPath<'tcx>> {
//! The equivalent of `owned_ptr_base_path` for an &Rc<LoanPath> rather than
//! a &LoanPath.
return match helper(loan_path) {
Some(new_loan_path) => new_loan_path,
None => loan_path.clone()
};
fn helper<'tcx>(loan_path: &Rc<LoanPath<'tcx>>) -> Option<Rc<LoanPath<'tcx>>> {
match loan_path.kind {
LpVar(_) | LpUpvar(_) => None,
LpExtend(ref lp_base, _, LpDeref(mc::Unique)) => {
match helper(lp_base) {
v @ Some(_) => v,
None => Some(lp_base.clone())
}
}
LpDowncast(ref lp_base, _) |
LpExtend(ref lp_base, ..) => helper(lp_base)
}
}
}
struct CheckLoanCtxt<'a, 'tcx> {
bccx: &'a BorrowckCtxt<'a, 'tcx>,
dfcx_loans: &'a LoanDataFlow<'tcx>,
move_data: &'a move_data::FlowedMoveData<'tcx>,
all_loans: &'a [Loan<'tcx>],
movable_generator: bool,
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> {
fn consume(&mut self,
consume_id: hir::HirId,
_: Span,
cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
debug!("consume(consume_id={}, cmt={:?})", consume_id, cmt);
self.consume_common(consume_id.local_id, cmt, mode);
}
fn matched_pat(&mut self,
_matched_pat: &hir::Pat,
_cmt: &mc::cmt_<'_>,
_mode: euv::MatchMode) { }
fn consume_pat(&mut self,
consume_pat: &hir::Pat,
cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
debug!("consume_pat(consume_pat={:?}, cmt={:?})", consume_pat, cmt);
self.consume_common(consume_pat.hir_id.local_id, cmt, mode);
}
fn borrow(&mut self,
borrow_id: hir::HirId,
borrow_span: Span,
cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause)
{
debug!("borrow(borrow_id={}, cmt={:?}, loan_region={:?}, \
bk={:?}, loan_cause={:?})",
borrow_id, cmt, loan_region,
bk, loan_cause);
if let Some(lp) = opt_loan_path(cmt) {
self.check_if_path_is_moved(borrow_id.local_id, &lp);
}
self.check_for_conflicting_loans(borrow_id.local_id);
self.check_for_loans_across_yields(cmt, loan_region, borrow_span);
}
fn mutate(&mut self,
assignment_id: hir::HirId,
_: Span,
assignee_cmt: &mc::cmt_<'tcx>,
mode: euv::MutateMode)
{
debug!("mutate(assignment_id={}, assignee_cmt={:?})",
assignment_id, assignee_cmt);
if let Some(lp) = opt_loan_path(assignee_cmt) {
match mode {
MutateMode::Init | MutateMode::JustWrite => {
// In a case like `path = 1`, then path does not
// have to be *FULLY* initialized, but we still
// must be careful lest it contains derefs of
// pointers.
self.check_if_assigned_path_is_moved(assignee_cmt.hir_id.local_id, &lp);
}
MutateMode::WriteAndRead => {
// In a case like `path += 1`, then path must be
// fully initialized, since we will read it before
// we write it.
self.check_if_path_is_moved(assignee_cmt.hir_id.local_id,
&lp);
}
}
}
self.check_assignment(assignment_id.local_id, assignee_cmt);
}
fn decl_without_init(&mut self, _id: hir::HirId, _span: Span) { }
}
pub fn check_loans<'a, 'tcx>(
bccx: &BorrowckCtxt<'a, 'tcx>,
dfcx_loans: &LoanDataFlow<'tcx>,
move_data: &move_data::FlowedMoveData<'tcx>,
all_loans: &[Loan<'tcx>],
body: &hir::Body,
) {
debug!("check_loans(body id={})", body.value.hir_id);
let def_id = bccx.tcx.hir().body_owner_def_id(body.id());
let hir_id = bccx.tcx.hir().as_local_hir_id(def_id).unwrap();
let movable_generator = !match bccx.tcx.hir().get(hir_id) {
Node::Expr(&hir::Expr {
kind: hir::ExprKind::Closure(.., Some(hir::GeneratorMovability::Static)),
..
}) => true,
_ => false,
};
let param_env = bccx.tcx.param_env(def_id);
let mut clcx = CheckLoanCtxt {
bccx,
dfcx_loans,
move_data,
all_loans,
movable_generator,
};
let rvalue_promotable_map = bccx.tcx.rvalue_promotable_map(def_id);
euv::ExprUseVisitor::new(&mut clcx,
bccx.tcx,
def_id,
param_env,
&bccx.region_scope_tree,
bccx.tables,
Some(rvalue_promotable_map))
.consume_body(body);
}
fn compatible_borrow_kinds(borrow_kind1: ty::BorrowKind,
borrow_kind2: ty::BorrowKind)
-> bool {
borrow_kind1 == ty::ImmBorrow && borrow_kind2 == ty::ImmBorrow
}
impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
pub fn tcx(&self) -> TyCtxt<'tcx> { self.bccx.tcx }
pub fn each_issued_loan<F>(&self, node: hir::ItemLocalId, mut op: F) -> bool where
F: FnMut(&Loan<'tcx>) -> bool,
{
//! Iterates over each loan that has been issued
//! on entrance to `node`, regardless of whether it is
//! actually *in scope* at that point. Sometimes loans
//! are issued for future scopes and thus they may have been
//! *issued* but not yet be in effect.
self.dfcx_loans.each_bit_on_entry(node, |loan_index| {
let loan = &self.all_loans[loan_index];
op(loan)
})
}
pub fn each_in_scope_loan<F>(&self, scope: region::Scope, mut op: F) -> bool where
F: FnMut(&Loan<'tcx>) -> bool,
{
//! Like `each_issued_loan()`, but only considers loans that are
//! currently in scope.
self.each_issued_loan(scope.item_local_id(), |loan| {
if self.bccx.region_scope_tree.is_subscope_of(scope, loan.kill_scope) {
op(loan)
} else {
true
}
})
}
fn each_in_scope_loan_affecting_path<F>(&self,
scope: region::Scope,
loan_path: &LoanPath<'tcx>,
mut op: F)
-> bool where
F: FnMut(&Loan<'tcx>) -> bool,
{
//! Iterates through all of the in-scope loans affecting `loan_path`,
//! calling `op`, and ceasing iteration if `false` is returned.
// First, we check for a loan restricting the path P being used. This
// accounts for borrows of P but also borrows of subpaths, like P.a.b.
// Consider the following example:
//
// let x = &mut a.b.c; // Restricts a, a.b, and a.b.c
// let y = a; // Conflicts with restriction
let loan_path = owned_ptr_base_path(loan_path);
let cont = self.each_in_scope_loan(scope, |loan| {
let mut ret = true;
for restr_path in &loan.restricted_paths {
if **restr_path == *loan_path {
if !op(loan) {
ret = false;
break;
}
}
}
ret
});
if !cont {
return false;
}
// Next, we must check for *loans* (not restrictions) on the path P or
// any base path. This rejects examples like the following:
//
// let x = &mut a.b;
// let y = a.b.c;
//
// Limiting this search to *loans* and not *restrictions* means that
// examples like the following continue to work:
//
// let x = &mut a.b;
// let y = a.c;
let mut loan_path = loan_path;
loop {
match loan_path.kind {
LpVar(_) | LpUpvar(_) => {
break;
}
LpDowncast(ref lp_base, _) |
LpExtend(ref lp_base, ..) => {
loan_path = &lp_base;
}
}
let cont = self.each_in_scope_loan(scope, |loan| {
if *loan.loan_path == *loan_path {
op(loan)
} else {
true
}
});
if !cont {
return false;
}
}
return true;
}
pub fn loans_generated_by(&self, node: hir::ItemLocalId) -> Vec<usize> {
//! Returns a vector of the loans that are generated as
//! we enter `node`.
let mut result = Vec::new();
self.dfcx_loans.each_gen_bit(node, |loan_index| {
result.push(loan_index);
true
});
return result;
}
pub fn check_for_loans_across_yields(&self,
cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
borrow_span: Span) {
pub fn borrow_of_local_data(cmt: &mc::cmt_<'_>) -> bool {
match cmt.cat {
// Borrows of static items is allowed
Categorization::StaticItem => false,
// Reborrow of already borrowed data is ignored
// Any errors will be caught on the initial borrow
Categorization::Deref(..) => false,
// By-ref upvars has Derefs so they will get ignored.
// Generators counts as FnOnce so this leaves only
// by-move upvars, which is local data for generators
Categorization::Upvar(..) => true,
Categorization::ThreadLocal(region) |
Categorization::Rvalue(region) => {
// Rvalues promoted to 'static are no longer local
if let RegionKind::ReStatic = *region {
false
} else {
true
}
}
// Borrow of local data must be checked
Categorization::Local(..) => true,
// For interior references and downcasts, find out if the base is local
Categorization::Downcast(ref cmt_base, _) |
Categorization::Interior(ref cmt_base, _) => borrow_of_local_data(&cmt_base),
}
}
if !self.movable_generator {
return;
}
if !borrow_of_local_data(cmt) {
return;
}
let scope = match *loan_region {
// A concrete region in which we will look for a yield expression
RegionKind::ReScope(scope) => scope,
// There cannot be yields inside an empty region
RegionKind::ReEmpty => return,
// Local data cannot have these lifetimes
RegionKind::ReEarlyBound(..) |
RegionKind::ReLateBound(..) |
RegionKind::ReFree(..) |
RegionKind::ReStatic => {
self.bccx
.tcx
.sess.delay_span_bug(borrow_span,
&format!("unexpected region for local data {:?}",
loan_region));
return
}
// These cannot exist in borrowck
RegionKind::ReVar(..) |
RegionKind::RePlaceholder(..) |
RegionKind::ReClosureBound(..) |
RegionKind::ReErased => span_bug!(borrow_span,
"unexpected region in borrowck {:?}",
loan_region),
};
let body_id = self.bccx.body.value.hir_id.local_id;
if self.bccx.region_scope_tree.containing_body(scope) != Some(body_id) {
// We are borrowing local data longer than its storage.
// This should result in other borrowck errors.
self.bccx.tcx.sess.delay_span_bug(borrow_span,
"borrowing local data longer than its storage");
return;
}
if let Some(_) = self.bccx.region_scope_tree
.yield_in_scope_for_expr(scope, cmt.hir_id, self.bccx.body)
{
self.bccx.signal_error();
}
}
pub fn check_for_conflicting_loans(&self, node: hir::ItemLocalId) {
//! Checks to see whether any of the loans that are issued
//! on entrance to `node` conflict with loans that have already been
//! issued when we enter `node` (for example, we do not
//! permit two `&mut` borrows of the same variable).
//!
//! (Note that some loans can be *issued* without necessarily
//! taking effect yet.)
debug!("check_for_conflicting_loans(node={:?})", node);
let new_loan_indices = self.loans_generated_by(node);
debug!("new_loan_indices = {:?}", new_loan_indices);
for &new_loan_index in &new_loan_indices {
self.each_issued_loan(node, |issued_loan| {
let new_loan = &self.all_loans[new_loan_index];
// Only report an error for the first issued loan that conflicts
// to avoid O(n^2) errors.
self.report_error_if_loans_conflict(issued_loan, new_loan)
});
}
for (i, &x) in new_loan_indices.iter().enumerate() {
let old_loan = &self.all_loans[x];
for &y in &new_loan_indices[(i+1) ..] {
let new_loan = &self.all_loans[y];
self.report_error_if_loans_conflict(old_loan, new_loan);
}
}
}
pub fn report_error_if_loans_conflict(
&self,
old_loan: &Loan<'tcx>,
new_loan: &Loan<'tcx>,
) -> bool {
//! Checks whether `old_loan` and `new_loan` can safely be issued
//! simultaneously.
debug!("report_error_if_loans_conflict(old_loan={:?}, new_loan={:?})",
old_loan,
new_loan);
// Should only be called for loans that are in scope at the same time.
assert!(self.bccx.region_scope_tree.scopes_intersect(old_loan.kill_scope,
new_loan.kill_scope));
self.report_error_if_loan_conflicts_with_restriction(
old_loan, new_loan)
&& self.report_error_if_loan_conflicts_with_restriction(
new_loan, old_loan)
}
    /// Returns `true` if `loan2` is permitted under `loan1`'s restrictions,
    /// `false` after signalling an error otherwise.
    pub fn report_error_if_loan_conflicts_with_restriction(
        &self,
        loan1: &Loan<'tcx>,
        loan2: &Loan<'tcx>,
    ) -> bool {
        //! Checks whether the restrictions introduced by `loan1` would
        //! prohibit `loan2`.
        debug!("report_error_if_loan_conflicts_with_restriction(\
                loan1={:?}, loan2={:?})",
               loan1,
               loan2);
        // Compatible borrow kinds (e.g. two shared borrows) never conflict,
        // regardless of paths.
        if compatible_borrow_kinds(loan1.kind, loan2.kind) {
            return true;
        }
        let loan2_base_path = owned_ptr_base_path_rc(&loan2.loan_path);
        // A conflict arises only when loan2's base path is among the paths
        // loan1 restricts; the first hit signals an error and stops.
        for restr_path in &loan1.restricted_paths {
            if *restr_path != loan2_base_path { continue; }
            self.bccx.signal_error();
            return false;
        }
        true
    }
    /// Shared logic for copy and move consumption of a place: verifies the
    /// loan path is not frozen (copy), not borrowed (move), and not already
    /// moved-from.
    fn consume_common(
        &self,
        id: hir::ItemLocalId,
        cmt: &mc::cmt_<'tcx>,
        mode: euv::ConsumeMode,
    ) {
        // Places without a loan path (e.g. rvalues) need no checking.
        if let Some(lp) = opt_loan_path(cmt) {
            match mode {
                euv::Copy => {
                    self.check_for_copy_of_frozen_path(id, &lp);
                }
                euv::Move(_) => {
                    // Sometimes moves aren't from a move path;
                    // this either means that the original move
                    // was from something illegal to move,
                    // or was moved from referent of an unsafe
                    // pointer or something like that.
                    if self.move_data.is_move_path(id, &lp) {
                        self.check_for_move_of_borrowed_path(id, &lp);
                    }
                }
            }
            // In either mode, using a moved/uninitialized value is an error.
            self.check_if_path_is_moved(id, &lp);
        }
    }
fn check_for_copy_of_frozen_path(&self,
id: hir::ItemLocalId,
copy_path: &LoanPath<'tcx>) {
self.analyze_restrictions_on_use(id, copy_path, ty::ImmBorrow);
}
fn check_for_move_of_borrowed_path(&self,
id: hir::ItemLocalId,
move_path: &LoanPath<'tcx>) {
// We want to detect if there are any loans at all, so we search for
// any loans incompatible with MutBorrrow, since all other kinds of
// loans are incompatible with that.
self.analyze_restrictions_on_use(id, move_path, ty::MutBorrow);
}
fn analyze_restrictions_on_use(&self,
expr_id: hir::ItemLocalId,
use_path: &LoanPath<'tcx>,
borrow_kind: ty::BorrowKind) {
debug!("analyze_restrictions_on_use(expr_id={:?}, use_path={:?})",
expr_id, use_path);
let scope = region::Scope {
id: expr_id,
data: region::ScopeData::Node
};
self.each_in_scope_loan_affecting_path(
scope, use_path, |loan| {
if !compatible_borrow_kinds(loan.kind, borrow_kind) {
self.bccx.signal_error();
false
} else {
true
}
});
}
    /// Reports an error if `expr` (which should be a path)
    /// is using a moved/uninitialized value
    fn check_if_path_is_moved(&self,
                              id: hir::ItemLocalId,
                              lp: &Rc<LoanPath<'tcx>>) {
        debug!("check_if_path_is_moved(id={:?}, lp={:?})", id, lp);
        // FIXME: if you find yourself tempted to cut and paste
        // the body below and then specializing the error reporting,
        // consider refactoring this instead!
        let base_lp = owned_ptr_base_path_rc(lp);
        // Signal on the first move found; returning `false` from the
        // callback stops the iteration.
        self.move_data.each_move_of(id, &base_lp, |_, _| {
            self.bccx.signal_error();
            false
        });
    }
    /// Reports an error if assigning to `lp` will use a
    /// moved/uninitialized value. Mainly this is concerned with
    /// detecting derefs of uninitialized pointers.
    ///
    /// For example:
    ///
    /// ```
    /// let a: i32;
    /// a = 10; // ok, even though a is uninitialized
    /// ```
    ///
    /// ```
    /// struct Point { x: u32, y: u32 }
    /// let mut p: Point;
    /// p.x = 22; // ok, even though `p` is uninitialized
    /// ```
    ///
    /// ```compile_fail,E0381
    /// # struct Point { x: u32, y: u32 }
    /// let mut p: Box<Point>;
    /// (*p).x = 22; // not ok, p is uninitialized, can't deref
    /// ```
    fn check_if_assigned_path_is_moved(&self,
                                       id: hir::ItemLocalId,
                                       lp: &Rc<LoanPath<'tcx>>)
    {
        // Recurse structurally on the loan-path kind: assignment to a
        // component may or may not require the base to be initialized.
        match lp.kind {
            LpVar(_) | LpUpvar(_) => {
                // assigning to `x` does not require that `x` is initialized
            }
            LpDowncast(ref lp_base, _) => {
                // assigning to `(P->Variant).f` is ok if assigning to `P` is ok
                self.check_if_assigned_path_is_moved(id, lp_base);
            }
            LpExtend(ref lp_base, _, LpInterior(_, InteriorField(_))) => {
                match lp_base.to_type().kind {
                    ty::Adt(def, _) if def.has_dtor(self.tcx()) => {
                        // In the case where the owner implements drop, then
                        // the path must be initialized to prevent a case of
                        // partial reinitialization
                        //
                        // FIXME: could refactor via hypothetical
                        // generalized check_if_path_is_moved
                        let loan_path = owned_ptr_base_path_rc(lp_base);
                        self.move_data.each_move_of(id, &loan_path, |_, _| {
                            self.bccx
                                .signal_error();
                            false
                        });
                        return;
                    },
                    _ => {},
                }
                // assigning to `P.f` is ok if assigning to `P` is ok
                self.check_if_assigned_path_is_moved(id, lp_base);
            }
            LpExtend(ref lp_base, _, LpInterior(_, InteriorElement)) |
            LpExtend(ref lp_base, _, LpDeref(_)) => {
                // assigning to `P[i]` requires `P` is initialized
                // assigning to `(*P)` requires `P` is initialized
                self.check_if_path_is_moved(id, lp_base);
            }
        }
    }
    /// Checks an assignment: it must not invalidate outstanding loans of
    /// the assignee path, and re-assignment to an immutable local is an
    /// error.
    fn check_assignment(&self,
                        assignment_id: hir::ItemLocalId,
                        assignee_cmt: &mc::cmt_<'tcx>) {
        debug!("check_assignment(assignee_cmt={:?})", assignee_cmt);
        // Check that we don't invalidate any outstanding loans
        if let Some(loan_path) = opt_loan_path(assignee_cmt) {
            let scope = region::Scope {
                id: assignment_id,
                data: region::ScopeData::Node
            };
            // Any in-scope loan affecting the assignee path is a conflict;
            // signal on the first and stop (callback returns false).
            self.each_in_scope_loan_affecting_path(scope, &loan_path, |_| {
                self.bccx.signal_error();
                false
            });
        }
        // Check for reassignments to (immutable) local variables. This
        // needs to be done here instead of in check_loans because we
        // depend on move data.
        if let Categorization::Local(_) = assignee_cmt.cat {
            // `unwrap` is justified: locals always have a loan path.
            let lp = opt_loan_path(assignee_cmt).unwrap();
            self.move_data.each_assignment_of(assignment_id, &lp, |_| {
                if !assignee_cmt.mutbl.is_mutable() {
                    self.bccx.signal_error();
                }
                false
            });
            return
        }
    }
}

View file

@ -1,135 +0,0 @@
//! Computes moves.
use crate::borrowck::*;
use crate::borrowck::move_data::*;
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::middle::mem_categorization::InteriorOffsetKind as Kind;
use rustc::ty::{self, Ty};
use std::rc::Rc;
use syntax_pos::Span;
use log::debug;
/// Bundled arguments shared by the `gather_move_from_*` entry points,
/// consumed by `gather_move`.
struct GatherMoveInfo<'c, 'tcx> {
    /// Local id of the expression or pattern performing the move.
    id: hir::ItemLocalId,
    /// Categorization of the place being moved from.
    cmt: &'c mc::cmt_<'tcx>,
}
/// Records a variable declaration without initializer in `move_data`:
/// until assigned, the variable counts as moved-from (uninitialized).
pub fn gather_decl<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                             move_data: &MoveData<'tcx>,
                             var_id: hir::HirId,
                             var_ty: Ty<'tcx>) {
    let kind = LpVar(var_id);
    let path = Rc::new(LoanPath::new(kind, var_ty));
    move_data.add_move(bccx.tcx, path, var_id.local_id);
}
/// Records a move performed by an expression (e.g. `let y = x;`).
pub fn gather_move_from_expr<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                       move_data: &MoveData<'tcx>,
                                       move_expr_id: hir::ItemLocalId,
                                       cmt: &mc::cmt_<'tcx>) {
    gather_move(bccx, move_data, GatherMoveInfo { id: move_expr_id, cmt });
}
/// Records a move performed by a binding pattern (e.g. a by-move `match` arm).
pub fn gather_move_from_pat<'a, 'c, 'tcx>(
    bccx: &BorrowckCtxt<'a, 'tcx>,
    move_data: &MoveData<'tcx>,
    move_pat: &hir::Pat,
    cmt: &'c mc::cmt_<'tcx>,
) {
    debug!("gather_move_from_pat: move_pat={:?}", move_pat);
    let info = GatherMoveInfo {
        id: move_pat.hir_id.local_id,
        cmt,
    };
    gather_move(bccx, move_data, info);
}
/// Common implementation behind the `gather_move_from_*` entry points:
/// rejects illegal move origins, then records the move in `move_data`.
fn gather_move<'a, 'c, 'tcx>(
    bccx: &BorrowckCtxt<'a, 'tcx>,
    move_data: &MoveData<'tcx>,
    move_info: GatherMoveInfo<'c, 'tcx>,
) {
    debug!("gather_move(move_id={:?}, cmt={:?})",
           move_info.id, move_info.cmt);
    // Moving out of certain places (borrowed pointees, statics, indexed
    // array elements, ...) is illegal; signal and bail before recording.
    if check_and_get_illegal_move_origin(bccx, move_info.cmt).is_some() {
        bccx.signal_error();
        return;
    }
    // A move from an rvalue or through a raw pointer has no loan path
    // and needs no tracking; otherwise record it.
    if let Some(loan_path) = opt_loan_path(&move_info.cmt) {
        move_data.add_move(bccx.tcx, loan_path, move_info.id);
    }
}
/// Records an assignment to `assignee_loan_path` in `move_data`; an
/// assignment (re-)initializes the path for later moved-ness checks.
pub fn gather_assignment<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                   move_data: &MoveData<'tcx>,
                                   assignment_id: hir::ItemLocalId,
                                   assignment_span: Span,
                                   assignee_loan_path: Rc<LoanPath<'tcx>>) {
    let tcx = bccx.tcx;
    move_data.add_assignment(tcx, assignee_loan_path, assignment_id, assignment_span);
}
// (keep in sync with move_error::report_cannot_move_out_of )
/// Walks the categorization chain of `cmt` and returns the categorization
/// of the first place (if any) that makes the move illegal; `None` means
/// the move origin is fine.
fn check_and_get_illegal_move_origin<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                               cmt: &mc::cmt_<'tcx>)
                                               -> Option<mc::cmt_<'tcx>> {
    match cmt.cat {
        // Cannot move out of borrowed pointees, raw-pointer pointees,
        // thread-locals, or statics.
        Categorization::Deref(_, mc::BorrowedPtr(..)) |
        Categorization::Deref(_, mc::UnsafePtr(..)) |
        Categorization::ThreadLocal(..) |
        Categorization::StaticItem => {
            Some(cmt.clone())
        }
        // Owned/locally rooted places can be moved from.
        Categorization::Rvalue(..) |
        Categorization::Local(..) |
        Categorization::Upvar(..) => {
            None
        }
        Categorization::Downcast(ref b, _) |
        Categorization::Interior(ref b, mc::InteriorField(_)) |
        Categorization::Interior(ref b, mc::InteriorElement(Kind::Pattern)) => {
            match b.ty.kind {
                ty::Adt(def, _) => {
                    // Moving a field out of a type with a destructor would
                    // leave the value partially moved at drop time.
                    if def.has_dtor(bccx.tcx) {
                        Some(cmt.clone())
                    } else {
                        check_and_get_illegal_move_origin(bccx, b)
                    }
                }
                // Slices have no statically known ownership of elements.
                ty::Slice(..) => Some(cmt.clone()),
                _ => {
                    check_and_get_illegal_move_origin(bccx, b)
                }
            }
        }
        Categorization::Interior(_, mc::InteriorElement(Kind::Index)) => {
            // Forbid move of arr[i] for arr: [T; 3]; see RFC 533.
            Some(cmt.clone())
        }
        Categorization::Deref(ref b, mc::Unique) => {
            // Moving out of a box: legality depends on the box's base.
            check_and_get_illegal_move_origin(bccx, b)
        }
    }
}

View file

@ -1,113 +0,0 @@
//! This module implements the check that the lifetime of a borrow
//! does not exceed the lifetime of the value being borrowed.
use crate::borrowck::*;
use rustc::hir::HirId;
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::middle::region;
use rustc::ty;
use log::debug;
type R = Result<(),()>;
/// Reports an error if `loan_region` outlives the scope of the borrowed
/// place `cmt` (for upvars, the enclosing item's body scope `item_scope`
/// is used as the bound).
pub fn guarantee_lifetime<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                    item_scope: region::Scope,
                                    cmt: &'a mc::cmt_<'tcx>,
                                    loan_region: ty::Region<'tcx>)
                                    -> Result<(),()> {
    debug!("guarantee_lifetime(cmt={:?}, loan_region={:?})",
           cmt, loan_region);
    GuaranteeLifetimeContext { bccx, item_scope, loan_region }
        .check(cmt, None)
}
///////////////////////////////////////////////////////////////////////////
// Private
/// State threaded through the recursive lifetime check in `check`.
struct GuaranteeLifetimeContext<'a, 'tcx> {
    bccx: &'a BorrowckCtxt<'a, 'tcx>,

    // the scope of the function body for the enclosing item
    item_scope: region::Scope,

    /// The region for which the borrowed value must remain valid.
    loan_region: ty::Region<'tcx>,
}
impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> {
    fn check(&self, cmt: &mc::cmt_<'tcx>, discr_scope: Option<HirId>) -> R {
        //! Main routine. Walks down `cmt` until we find the
        //! "guarantor". Reports an error if `self.loan_region` is
        //! larger than scope of `cmt`.
        debug!("guarantee_lifetime.check(cmt={:?}, loan_region={:?})",
               cmt,
               self.loan_region);

        match cmt.cat {
            // Base cases: these categorizations are guarantors, so compare
            // the loan region against their maximal valid scope.
            Categorization::Rvalue(..) |
            Categorization::ThreadLocal(..) |
            Categorization::Local(..) |                         // L-Local
            Categorization::Upvar(..) |
            Categorization::Deref(_, mc::BorrowedPtr(..)) |     // L-Deref-Borrowed
            Categorization::Deref(_, mc::UnsafePtr(..)) => {
                self.check_scope(self.scope(cmt))
            }

            // Statics are valid for any loan region.
            Categorization::StaticItem => {
                Ok(())
            }

            // Recursive cases: the guarantor of a component is the
            // guarantor of its base.
            Categorization::Downcast(ref base, _) |
            Categorization::Deref(ref base, mc::Unique) |       // L-Deref-Send
            Categorization::Interior(ref base, _) => {          // L-Field
                self.check(base, discr_scope)
            }
        }
    }

    fn check_scope(&self, max_scope: ty::Region<'tcx>) -> R {
        //! Reports an error if `loan_region` is larger than `max_scope`
        if !self.bccx.is_subregion_of(self.loan_region, max_scope) {
            Err(self.bccx.signal_error())
        } else {
            Ok(())
        }
    }

    fn scope(&self, cmt: &mc::cmt_<'tcx>) -> ty::Region<'tcx> {
        //! Returns the maximal region scope for the which the
        //! place `cmt` is guaranteed to be valid without any
        //! rooting etc, and presuming `cmt` is not mutated.

        match cmt.cat {
            // Temporaries and thread-locals live for their recorded scope.
            Categorization::ThreadLocal(temp_scope) |
            Categorization::Rvalue(temp_scope) => {
                temp_scope
            }
            // Upvars are bounded by the enclosing item's body scope.
            Categorization::Upvar(..) => {
                self.bccx.tcx.mk_region(ty::ReScope(self.item_scope))
            }
            // Locals are valid for the variable's lexical scope.
            Categorization::Local(hir_id) => {
                self.bccx.tcx.mk_region(ty::ReScope(
                    self.bccx.region_scope_tree.var_scope(hir_id.local_id)))
            }
            // Statics and raw-pointer referents are treated as 'static;
            // for raw pointers the caller has taken responsibility.
            Categorization::StaticItem |
            Categorization::Deref(_, mc::UnsafePtr(..)) => {
                self.bccx.tcx.lifetimes.re_static
            }
            // A borrowed pointee is valid for the borrow's own region.
            Categorization::Deref(_, mc::BorrowedPtr(_, r)) => {
                r
            }
            // Components are valid as long as their base is.
            Categorization::Downcast(ref cmt, _) |
            Categorization::Deref(ref cmt, mc::Unique) |
            Categorization::Interior(ref cmt, _) => {
                self.scope(cmt)
            }
        }
    }
}

View file

@ -1,433 +0,0 @@
// ----------------------------------------------------------------------
// Gathering loans
//
// The borrow check proceeds in two phases. In phase one, we gather the full
// set of loans that are required at any point. These are sorted according to
// their associated scopes. In phase two, checking loans, we will then make
// sure that all of these loans are honored.
use crate::borrowck::*;
use crate::borrowck::move_data::MoveData;
use rustc::middle::expr_use_visitor as euv;
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::middle::region;
use rustc::ty::{self, TyCtxt};
use syntax_pos::Span;
use rustc::hir;
use log::debug;
use restrictions::RestrictionResult;
mod lifetime;
mod restrictions;
mod gather_moves;
/// Phase one of borrowck: walks `body` with an `ExprUseVisitor`, collecting
/// every loan it requires plus the move/assignment data for its places.
pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                    body: hir::BodyId)
                                    -> (Vec<Loan<'tcx>>, move_data::MoveData<'tcx>) {
    let def_id = bccx.tcx.hir().body_owner_def_id(body);
    let param_env = bccx.tcx.param_env(def_id);
    let mut glcx = GatherLoanCtxt {
        bccx,
        all_loans: Vec::new(),
        // Upper bound for upvar loan scopes: the body's own node scope.
        item_ub: region::Scope {
            id: bccx.tcx.hir().body(body).value.hir_id.local_id,
            data: region::ScopeData::Node
        },
        move_data: MoveData::default(),
    };

    let rvalue_promotable_map = bccx.tcx.rvalue_promotable_map(def_id);
    // The visitor drives the euv::Delegate impl below, which populates
    // `all_loans` and `move_data` as it encounters borrows/moves/assignments.
    euv::ExprUseVisitor::new(&mut glcx,
                             bccx.tcx,
                             def_id,
                             param_env,
                             &bccx.region_scope_tree,
                             bccx.tables,
                             Some(rvalue_promotable_map))
        .consume_body(bccx.body);

    let GatherLoanCtxt { all_loans, move_data, .. } = glcx;
    (all_loans, move_data)
}
/// Accumulator for the gather-loans phase; implements `euv::Delegate`.
struct GatherLoanCtxt<'a, 'tcx> {
    bccx: &'a BorrowckCtxt<'a, 'tcx>,
    /// Moves and assignments recorded while walking the body.
    move_data: move_data::MoveData<'tcx>,
    /// All loans required by the body, indexed by `Loan::index`.
    all_loans: Vec<Loan<'tcx>>,
    /// `item_ub` is used as an upper-bound on the lifetime whenever we
    /// ask for the scope of an expression categorized as an upvar.
    item_ub: region::Scope,
}
// Callbacks invoked by `ExprUseVisitor` as it walks the body; each one
// records the corresponding loan/move/assignment fact.
impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> {
    /// An expression consumed a value: record a move, ignore copies.
    fn consume(&mut self,
               consume_id: hir::HirId,
               _consume_span: Span,
               cmt: &mc::cmt_<'tcx>,
               mode: euv::ConsumeMode) {
        debug!("consume(consume_id={}, cmt={:?}, mode={:?})",
               consume_id, cmt, mode);

        match mode {
            euv::Move(_) => {
                gather_moves::gather_move_from_expr(
                    self.bccx, &self.move_data,
                    consume_id.local_id, cmt);
            }
            euv::Copy => { }
        }
    }

    /// A pattern matched a place; nothing to record, only traced.
    fn matched_pat(&mut self,
                   matched_pat: &hir::Pat,
                   cmt: &mc::cmt_<'tcx>,
                   mode: euv::MatchMode) {
        debug!("matched_pat(matched_pat={:?}, cmt={:?}, mode={:?})",
               matched_pat,
               cmt,
               mode);
    }

    /// A pattern binding consumed a value: record a move, ignore copies.
    fn consume_pat(&mut self,
                   consume_pat: &hir::Pat,
                   cmt: &mc::cmt_<'tcx>,
                   mode: euv::ConsumeMode) {
        debug!("consume_pat(consume_pat={:?}, cmt={:?}, mode={:?})",
               consume_pat,
               cmt,
               mode);

        match mode {
            euv::Copy => { return; }
            euv::Move(_) => { }
        }

        gather_moves::gather_move_from_pat(
            self.bccx, &self.move_data,
            consume_pat, cmt);
    }

    /// A borrow occurred: validate it and (possibly) create a loan.
    fn borrow(&mut self,
              borrow_id: hir::HirId,
              _: Span,
              cmt: &mc::cmt_<'tcx>,
              loan_region: ty::Region<'tcx>,
              bk: ty::BorrowKind,
              loan_cause: euv::LoanCause)
    {
        debug!("borrow(borrow_id={}, cmt={:?}, loan_region={:?}, \
               bk={:?}, loan_cause={:?})",
               borrow_id, cmt, loan_region,
               bk, loan_cause);

        self.guarantee_valid(borrow_id.local_id,
                             cmt,
                             bk,
                             loan_region);
    }

    /// A place was assigned to: validate the assignment target.
    fn mutate(&mut self,
              assignment_id: hir::HirId,
              assignment_span: Span,
              assignee_cmt: &mc::cmt_<'tcx>,
              _: euv::MutateMode)
    {
        self.guarantee_assignment_valid(assignment_id,
                                        assignment_span,
                                        assignee_cmt);
    }

    /// A variable was declared without an initializer: record it as
    /// uninitialized in the move data.
    fn decl_without_init(&mut self, id: hir::HirId, _span: Span) {
        let ty = self.bccx
                     .tables
                     .node_type(id);
        gather_moves::gather_decl(self.bccx, &self.move_data, id, ty);
    }

    fn nested_body(&mut self, body_id: hir::BodyId) {
        debug!("nested_body(body_id={:?})", body_id);
        // rust-lang/rust#58776: MIR and AST borrow check disagree on where
        // certain closure errors are reported. As such migrate borrowck has to
        // operate at the level of items, rather than bodies. Check if the
        // contained closure had any errors and set `signalled_any_error` if it
        // has.
        let bccx = self.bccx;
        if bccx.tcx.migrate_borrowck() {
            if let SignalledError::NoErrorsSeen = bccx.signalled_any_error.get() {
                let closure_def_id = bccx.tcx.hir().body_owner_def_id(body_id);
                debug!("checking closure: {:?}", closure_def_id);

                bccx.signalled_any_error.set(bccx.tcx.borrowck(closure_def_id).signalled_any_error);
            }
        }
    }
}
/// Implements the A-* rules in README.md.
/// Returns `Err(())` (after signalling) when `req_kind` would create a
/// unique/mutable borrow of freely aliasable data.
fn check_aliasability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                cmt: &mc::cmt_<'tcx>,
                                req_kind: ty::BorrowKind)
                                -> Result<(),()> {
    let aliasability = cmt.freely_aliasable();
    debug!("check_aliasability aliasability={:?} req_kind={:?}",
           aliasability, req_kind);

    match (aliasability, req_kind) {
        (mc::Aliasability::NonAliasable, _) => {
            /* Uniquely accessible path -- OK for `&` and `&mut` */
            Ok(())
        }
        (mc::Aliasability::FreelyAliasable(mc::AliasableStatic), ty::ImmBorrow) => {
            // Borrow of an immutable static item.
            Ok(())
        }
        (mc::Aliasability::FreelyAliasable(mc::AliasableStaticMut), _) => {
            // Even touching a static mut is considered unsafe. We assume the
            // user knows what they're doing in these cases.
            Ok(())
        }
        (mc::Aliasability::FreelyAliasable(_), ty::UniqueImmBorrow) |
        (mc::Aliasability::FreelyAliasable(_), ty::MutBorrow) => {
            // Unique or mutable borrow of aliasable data: error.
            bccx.signal_error();
            Err(())
        }
        (..) => {
            Ok(())
        }
    }
}
/// Implements the M-* rules in README.md.
/// Returns `Err(())` (after signalling) when a mutable borrow is requested
/// of non-mutable data; immutable/unique-imm borrows always succeed.
fn check_mutability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                              cmt: &mc::cmt_<'tcx>,
                              req_kind: ty::BorrowKind)
                              -> Result<(),()> {
    debug!("check_mutability(cmt={:?} req_kind={:?}", cmt, req_kind);
    match req_kind {
        ty::UniqueImmBorrow | ty::ImmBorrow => {
            match cmt.mutbl {
                // I am intentionally leaving this here to help
                // refactoring if, in the future, we should add new
                // kinds of mutability.
                mc::McImmutable | mc::McDeclared | mc::McInherited => {
                    // both imm and mut data can be lent as imm;
                    // for mutable data, this is a freeze
                    Ok(())
                }
            }
        }

        ty::MutBorrow => {
            // Only mutable data can be lent as mutable.
            if !cmt.mutbl.is_mutable() {
                Err(bccx.signal_error())
            } else {
                Ok(())
            }
        }
    }
}
impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
    pub fn tcx(&self) -> TyCtxt<'tcx> { self.bccx.tcx }

    /// Guarantees that `cmt` is assignable, or reports an error.
    fn guarantee_assignment_valid(&mut self,
                                  assignment_id: hir::HirId,
                                  assignment_span: Span,
                                  cmt: &mc::cmt_<'tcx>) {
        let opt_lp = opt_loan_path(cmt);
        debug!("guarantee_assignment_valid(assignment_id={}, cmt={:?}) opt_lp={:?}",
               assignment_id, cmt, opt_lp);

        if let Categorization::Local(..) = cmt.cat {
            // Only re-assignments to locals require it to be
            // mutable - this is checked in check_loans.
        } else {
            // Check that we don't allow assignments to non-mutable data.
            if check_mutability(self.bccx, cmt, ty::MutBorrow).is_err() {
                return; // reported an error, no sense in reporting more.
            }
        }

        // Check that we don't allow assignments to aliasable data
        if check_aliasability(self.bccx, cmt, ty::MutBorrow).is_err() {
            return; // reported an error, no sense in reporting more.
        }

        match opt_lp {
            Some(lp) => {
                gather_moves::gather_assignment(self.bccx, &self.move_data,
                                                assignment_id.local_id,
                                                assignment_span,
                                                lp);
            }
            None => {
                // This can occur with e.g., `*foo() = 5`. In such
                // cases, there is no need to check for conflicts
                // with moves etc, just ignore.
            }
        }
    }

    /// Guarantees that `addr_of(cmt)` will be valid for the duration of `static_scope_r`, or
    /// reports an error. This may entail taking out loans, which will be added to the
    /// `req_loan_map`.
    fn guarantee_valid(&mut self,
                       borrow_id: hir::ItemLocalId,
                       cmt: &mc::cmt_<'tcx>,
                       req_kind: ty::BorrowKind,
                       loan_region: ty::Region<'tcx>) {
        debug!("guarantee_valid(borrow_id={:?}, cmt={:?}, \
               req_mutbl={:?}, loan_region={:?})",
               borrow_id,
               cmt,
               req_kind,
               loan_region);

        // a loan for the empty region can never be dereferenced, so
        // it is always safe
        if *loan_region == ty::ReEmpty {
            return;
        }

        // Check that the lifetime of the borrow does not exceed
        // the lifetime of the data being borrowed.
        if lifetime::guarantee_lifetime(self.bccx, self.item_ub, cmt, loan_region).is_err() {
            return; // reported an error, no sense in reporting more.
        }

        // Check that we don't allow mutable borrows of non-mutable data.
        if check_mutability(self.bccx, cmt, req_kind).is_err() {
            return; // reported an error, no sense in reporting more.
        }

        // Check that we don't allow mutable borrows of aliasable data.
        if check_aliasability(self.bccx, cmt, req_kind).is_err() {
            return; // reported an error, no sense in reporting more.
        }

        // Compute the restrictions that are required to enforce the
        // loan is safe.
        let restr = restrictions::compute_restrictions(self.bccx, &cmt, loan_region);
        debug!("guarantee_valid(): restrictions={:?}", restr);

        // Create the loan record (if needed).
        let loan = match restr {
            RestrictionResult::Safe => {
                // No restrictions---no loan record necessary
                return;
            }

            RestrictionResult::SafeIf(loan_path, restricted_paths) => {
                // Map the loan's region to the scope during which the
                // loan (and its restrictions) are in force.
                let loan_scope = match *loan_region {
                    ty::ReScope(scope) => scope,

                    ty::ReEarlyBound(ref br) => {
                        self.bccx.region_scope_tree.early_free_scope(self.tcx(), br)
                    }

                    ty::ReFree(ref fr) => {
                        self.bccx.region_scope_tree.free_scope(self.tcx(), fr)
                    }

                    ty::ReStatic => self.item_ub,

                    // These region kinds should never appear as the
                    // region of a borrow at this stage.
                    ty::ReEmpty |
                    ty::ReClosureBound(..) |
                    ty::ReLateBound(..) |
                    ty::ReVar(..) |
                    ty::RePlaceholder(..) |
                    ty::ReErased => {
                        span_bug!(
                            cmt.span,
                            "invalid borrow lifetime: {:?}",
                            loan_region);
                    }
                };
                debug!("loan_scope = {:?}", loan_scope);

                let borrow_scope = region::Scope {
                    id: borrow_id,
                    data: region::ScopeData::Node
                };
                let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope);
                debug!("gen_scope = {:?}", gen_scope);

                let kill_scope = self.compute_kill_scope(loan_scope, &loan_path);
                debug!("kill_scope = {:?}", kill_scope);

                Loan {
                    index: self.all_loans.len(),
                    loan_path,
                    kind: req_kind,
                    gen_scope,
                    kill_scope,
                    restricted_paths,
                }
            }
        };

        debug!("guarantee_valid(borrow_id={:?}), loan={:?}",
               borrow_id, loan);

        self.all_loans.push(loan);
    }

    pub fn compute_gen_scope(&self,
                             borrow_scope: region::Scope,
                             loan_scope: region::Scope)
                             -> region::Scope {
        //! Determine when to introduce the loan. Typically the loan
        //! is introduced at the point of the borrow, but in some cases,
        //! notably method arguments, the loan may be introduced only
        //! later, once it comes into scope.
        if self.bccx.region_scope_tree.is_subscope_of(borrow_scope, loan_scope) {
            borrow_scope
        } else {
            loan_scope
        }
    }

    pub fn compute_kill_scope(&self, loan_scope: region::Scope, lp: &LoanPath<'tcx>)
                              -> region::Scope {
        //! Determine when the loan restrictions go out of scope.
        //! This is either when the lifetime expires or when the
        //! local variable which roots the loan-path goes out of scope,
        //! whichever happens faster.
        //!
        //! It may seem surprising that we might have a loan region
        //! larger than the variable which roots the loan-path; this can
        //! come about when variables of `&mut` type are re-borrowed,
        //! as in this example:
        //!
        //!     struct Foo { counter: u32 }
        //!
        //!     fn counter<'a>(v: &'a mut Foo) -> &'a mut u32 {
        //!         &mut v.counter
        //!     }
        //!
        //! In this case, the reference (`'a`) outlives the
        //! variable `v` that hosts it. Note that this doesn't come up
        //! with immutable `&` pointers, because borrows of such pointers
        //! do not require restrictions and hence do not cause a loan.
        let lexical_scope = lp.kill_scope(self.bccx);
        if self.bccx.region_scope_tree.is_subscope_of(lexical_scope, loan_scope) {
            lexical_scope
        } else {
            assert!(self.bccx.region_scope_tree.is_subscope_of(loan_scope, lexical_scope));
            loan_scope
        }
    }
}

View file

@ -1,179 +0,0 @@
//! Computes the restrictions that result from a borrow.
use crate::borrowck::*;
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::ty;
use log::debug;
use crate::borrowck::ToInteriorKind;
use std::rc::Rc;
/// Outcome of computing the restrictions a borrow imposes.
#[derive(Debug)]
pub enum RestrictionResult<'tcx> {
    /// The borrow needs no loan record at all.
    Safe,

    /// The borrow is safe provided the given loan path — and each of the
    /// listed restricted paths — is not used incompatibly while in scope.
    SafeIf(Rc<LoanPath<'tcx>>, Vec<Rc<LoanPath<'tcx>>>)
}
/// Computes the set of loan paths that must be restricted for the
/// borrow of `cmt` lasting `loan_region` to be safe.
pub fn compute_restrictions<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                      cmt: &mc::cmt_<'tcx>,
                                      loan_region: ty::Region<'tcx>)
                                      -> RestrictionResult<'tcx> {
    RestrictionsContext { bccx, loan_region }.restrict(cmt)
}
///////////////////////////////////////////////////////////////////////////
// Private
/// State threaded through the recursive `restrict` walk.
struct RestrictionsContext<'a, 'tcx> {
    bccx: &'a BorrowckCtxt<'a, 'tcx>,
    /// Region of the borrow whose restrictions are being computed.
    loan_region: ty::Region<'tcx>,
}
impl<'a, 'tcx> RestrictionsContext<'a, 'tcx> {
    /// Walks the categorization chain of `cmt`, building up the loan path
    /// and the set of paths that must be restricted for the borrow.
    fn restrict(&self,
                cmt: &mc::cmt_<'tcx>) -> RestrictionResult<'tcx> {
        debug!("restrict(cmt={:?})", cmt);

        // Helper to build a loan path of the given kind at `cmt`'s type.
        let new_lp = |v: LoanPathKind<'tcx>| Rc::new(LoanPath::new(v, cmt.ty));

        match cmt.cat.clone() {
            Categorization::Rvalue(..) => {
                // Effectively, rvalues are stored into a
                // non-aliasable temporary on the stack. Since they
                // are inherently non-aliasable, they can only be
                // accessed later through the borrow itself and hence
                // must inherently comply with its terms.
                RestrictionResult::Safe
            }

            Categorization::ThreadLocal(..) => {
                // Thread-locals are statics that have a scope, with
                // no underlying structure to provide restrictions.
                RestrictionResult::Safe
            }

            Categorization::Local(local_id) => {
                // R-Variable, locally declared
                let lp = new_lp(LpVar(local_id));
                RestrictionResult::SafeIf(lp.clone(), vec![lp])
            }

            Categorization::Upvar(mc::Upvar { id, .. }) => {
                // R-Variable, captured into closure
                let lp = new_lp(LpUpvar(id));
                RestrictionResult::SafeIf(lp.clone(), vec![lp])
            }

            Categorization::Downcast(cmt_base, _) => {
                // When we borrow the interior of an enum, we have to
                // ensure the enum itself is not mutated, because that
                // could cause the type of the memory to change.
                self.restrict(&cmt_base)
            }

            Categorization::Interior(cmt_base, interior) => {
                // R-Field
                //
                // Overwriting the base would not change the type of
                // the memory, so no additional restrictions are
                // needed.
                let opt_variant_id = match cmt_base.cat {
                    Categorization::Downcast(_, variant_id) => Some(variant_id),
                    _ => None
                };
                let interior = interior.cleaned();
                let base_ty = cmt_base.ty;
                let result = self.restrict(&cmt_base);
                // Borrowing one union field automatically borrows all its fields.
                match base_ty.kind {
                    ty::Adt(adt_def, _) if adt_def.is_union() => match result {
                        RestrictionResult::Safe => RestrictionResult::Safe,
                        RestrictionResult::SafeIf(base_lp, mut base_vec) => {
                            // Add a sibling loan path for every field of
                            // the union, since they all overlap in memory.
                            for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                                let field = InteriorKind::InteriorField(
                                    mc::FieldIndex(i, field.ident.name)
                                );
                                let field_ty = if field == interior {
                                    cmt.ty
                                } else {
                                    self.bccx.tcx.types.err // Doesn't matter
                                };
                                let sibling_lp_kind = LpExtend(base_lp.clone(), cmt.mutbl,
                                                              LpInterior(opt_variant_id, field));
                                let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
                                base_vec.push(sibling_lp);
                            }

                            let lp = new_lp(LpExtend(base_lp, cmt.mutbl,
                                                     LpInterior(opt_variant_id, interior)));
                            RestrictionResult::SafeIf(lp, base_vec)
                        }
                    },
                    _ => self.extend(result, &cmt, LpInterior(opt_variant_id, interior))
                }
            }

            Categorization::StaticItem => {
                RestrictionResult::Safe
            }

            Categorization::Deref(cmt_base, pk) => {
                match pk {
                    mc::Unique => {
                        // R-Deref-Send-Pointer
                        //
                        // When we borrow the interior of a box, we
                        // cannot permit the base to be mutated, because that
                        // would cause the unique pointer to be freed.
                        //
                        // Eventually we should make these non-special and
                        // just rely on Deref<T> implementation.
                        let result = self.restrict(&cmt_base);
                        self.extend(result, &cmt, LpDeref(pk))
                    }
                    mc::BorrowedPtr(bk, lt) => {
                        // R-Deref-[Mut-]Borrowed
                        // The loan must not outlive the borrow it goes
                        // through; otherwise signal and give up.
                        if !self.bccx.is_subregion_of(self.loan_region, lt) {
                            self.bccx.signal_error();
                            return RestrictionResult::Safe;
                        }

                        match bk {
                            ty::ImmBorrow => RestrictionResult::Safe,
                            ty::MutBorrow | ty::UniqueImmBorrow => {
                                // R-Deref-Mut-Borrowed
                                //
                                // The referent can be aliased after the
                                // references lifetime ends (by a newly-unfrozen
                                // borrow).
                                let result = self.restrict(&cmt_base);
                                self.extend(result, &cmt, LpDeref(pk))
                            }
                        }
                    }
                    // Borrowck is not relevant for raw pointers
                    mc::UnsafePtr(..) => RestrictionResult::Safe
                }
            }
        }
    }

    /// Extends a base restriction result with one more loan-path element,
    /// adding the extended path to the restricted set.
    fn extend(&self,
              result: RestrictionResult<'tcx>,
              cmt: &mc::cmt_<'tcx>,
              elem: LoanPathElem<'tcx>) -> RestrictionResult<'tcx> {
        match result {
            RestrictionResult::Safe => RestrictionResult::Safe,
            RestrictionResult::SafeIf(base_lp, mut base_vec) => {
                let v = LpExtend(base_lp, cmt.mutbl, elem);
                let lp = Rc::new(LoanPath::new(v, cmt.ty));
                base_vec.push(lp.clone());
                RestrictionResult::SafeIf(lp, base_vec)
            }
        }
    }
}

View file

@ -1,621 +0,0 @@
//! See The Book chapter on the borrow checker for more details.
#![allow(non_camel_case_types)]
pub use LoanPathKind::*;
pub use LoanPathElem::*;
use InteriorKind::*;
use rustc::hir::HirId;
use rustc::hir::Node;
use rustc::middle::borrowck::{BorrowCheckResult, SignalledError};
use rustc::hir::def_id::{DefId, LocalDefId};
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::middle::region;
use rustc::middle::free_region::RegionRelations;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::query::Providers;
use std::borrow::Cow;
use std::cell::{Cell};
use std::fmt;
use std::rc::Rc;
use std::hash::{Hash, Hasher};
use log::debug;
use rustc::hir;
use crate::cfg;
use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom};
pub mod check_loans;
pub mod gather_loans;
pub mod move_data;
/// Marker operator parameterizing `DataFlowContext` for loan propagation
/// (its `BitwiseOperator`/`DataFlowOperator` impls live elsewhere in this
/// module — not visible here).
#[derive(Clone, Copy)]
pub struct LoanDataFlowOperator;
pub type LoanDataFlow<'tcx> = DataFlowContext<'tcx, LoanDataFlowOperator>;
/// Runs the `borrowck` query over every body owner in the crate, in
/// parallel where the compiler is built with parallel queries.
pub fn check_crate(tcx: TyCtxt<'_>) {
    tcx.par_body_owners(|def_id| tcx.ensure().borrowck(def_id));
}
/// Installs this module's query provider into the compiler's provider table.
pub fn provide(providers: &mut Providers<'_>) {
    providers.borrowck = borrowck;
}
/// Collection of conclusions determined via borrow checker analyses.
pub struct AnalysisData<'tcx> {
pub all_loans: Vec<Loan<'tcx>>,
pub loans: DataFlowContext<'tcx, LoanDataFlowOperator>,
pub move_data: move_data::FlowedMoveData<'tcx>,
}
/// Query provider: runs AST borrowck over the body owned by
/// `owner_def_id` and returns whether any error was signalled.
fn borrowck(tcx: TyCtxt<'_>, owner_def_id: DefId) -> &BorrowCheckResult {
    assert!(tcx.use_ast_borrowck() || tcx.migrate_borrowck());

    debug!("borrowck(body_owner_def_id={:?})", owner_def_id);

    // If match-checking already errored, skip the analysis entirely and
    // propagate that fact.
    let signalled_error = tcx.check_match(owner_def_id);
    if let SignalledError::SawSomeError = signalled_error {
        return tcx.arena.alloc(BorrowCheckResult {
            signalled_any_error: SignalledError::SawSomeError,
        })
    }

    let owner_id = tcx.hir().as_local_hir_id(owner_def_id).unwrap();

    match tcx.hir().get(owner_id) {
        Node::Ctor(..) => {
            // We get invoked with anything that has MIR, but some of
            // those things (notably the synthesized constructors from
            // tuple structs/variants) do not have an associated body
            // and do not need borrowchecking.
            return tcx.arena.alloc(BorrowCheckResult {
                signalled_any_error: SignalledError::NoErrorsSeen,
            })
        }
        _ => { }
    }

    let body_id = tcx.hir().body_owned_by(owner_id);
    let tables = tcx.typeck_tables_of(owner_def_id);
    let region_scope_tree = tcx.region_scope_tree(owner_def_id);
    let body = tcx.hir().body(body_id);
    let mut bccx = BorrowckCtxt {
        tcx,
        tables,
        region_scope_tree,
        owner_def_id,
        body,
        signalled_any_error: Cell::new(SignalledError::NoErrorsSeen),
    };

    // Eventually, borrowck will always read the MIR, but at the
    // moment we do not. So, for now, we always force MIR to be
    // constructed for a given fn, since this may result in errors
    // being reported and we want that to happen.
    //
    // Note that `mir_validated` is a "stealable" result; the
    // thief, `optimized_mir()`, forces borrowck, so we know that
    // is not yet stolen.
    tcx.ensure().mir_validated(owner_def_id);

    // option dance because you can't capture an uninitialized variable
    // by mut-ref.
    let mut cfg = None;
    if let Some(AnalysisData { all_loans,
                               loans: loan_dfcx,
                               move_data: flowed_moves }) =
        build_borrowck_dataflow_data(&mut bccx, false, body_id,
                                     |bccx| {
                                         cfg = Some(cfg::CFG::new(bccx.tcx, &body));
                                         cfg.as_mut().unwrap()
                                     })
    {
        check_loans::check_loans(&mut bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
    }

    tcx.arena.alloc(BorrowCheckResult {
        signalled_any_error: bccx.signalled_any_error.into_inner(),
    })
}
/// Gathers loans/moves for `body_id` and propagates them through the CFG.
/// Returns `None` when the body has no loans or moves (unless
/// `force_analysis` is set); the CFG is built lazily via `get_cfg` so that
/// the cheap empty case avoids constructing one.
fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(this: &mut BorrowckCtxt<'a, 'tcx>,
                                                 force_analysis: bool,
                                                 body_id: hir::BodyId,
                                                 get_cfg: F)
                                                 -> Option<AnalysisData<'tcx>>
    where F: FnOnce(&mut BorrowckCtxt<'a, 'tcx>) -> &'c cfg::CFG
{
    // Check the body of fn items.
    let (all_loans, move_data) =
        gather_loans::gather_loans_in_fn(this, body_id);

    if !force_analysis && move_data.is_empty() && all_loans.is_empty() {
        // large arrays of data inserted as constants can take a lot of
        // time and memory to borrow-check - see issue #36799. However,
        // they don't have places, so no borrow-check is actually needed.
        // Recognize that case and skip borrow-checking.
        debug!("skipping loan propagation for {:?} because of no loans", body_id);
        return None;
    } else {
        debug!("propagating loans in {:?}", body_id);
    }

    let cfg = get_cfg(this);
    let mut loan_dfcx =
        DataFlowContext::new(this.tcx,
                             "borrowck",
                             Some(this.body),
                             cfg,
                             LoanDataFlowOperator,
                             all_loans.len());
    // Each loan is generated at its gen scope and killed at the end of
    // its kill scope.
    for (loan_idx, loan) in all_loans.iter().enumerate() {
        loan_dfcx.add_gen(loan.gen_scope.item_local_id(), loan_idx);
        loan_dfcx.add_kill(KillFrom::ScopeEnd,
                           loan.kill_scope.item_local_id(),
                           loan_idx);
    }
    loan_dfcx.add_kills_from_flow_exits(cfg);
    loan_dfcx.propagate(cfg, this.body);

    let flowed_moves = move_data::FlowedMoveData::new(move_data,
                                                      this,
                                                      cfg,
                                                      this.body);

    Some(AnalysisData { all_loans,
                        loans: loan_dfcx,
                        move_data:flowed_moves })
}
/// Accessor for introspective clients inspecting `AnalysisData` and
/// the `BorrowckCtxt` itself, e.g., the flowgraph visualizer.
///
/// Builds a fresh `BorrowckCtxt` for the owner of `body_id` and runs a
/// forced dataflow analysis over the caller-provided `cfg`.
pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>(
    tcx: TyCtxt<'tcx>,
    body_id: hir::BodyId,
    cfg: &cfg::CFG)
    -> (BorrowckCtxt<'a, 'tcx>, AnalysisData<'tcx>)
{
    let hir = tcx.hir();
    let def_id = hir.local_def_id(hir.body_owner(body_id));

    let mut ctxt = BorrowckCtxt {
        tcx,
        tables: tcx.typeck_tables_of(def_id),
        region_scope_tree: tcx.region_scope_tree(def_id),
        owner_def_id: def_id,
        body: hir.body(body_id),
        signalled_any_error: Cell::new(SignalledError::NoErrorsSeen),
    };

    // `force_analysis == true` guarantees `Some(..)` here.
    let analysis = build_borrowck_dataflow_data(&mut ctxt, true, body_id, |_| cfg);
    (ctxt, analysis.unwrap())
}
// ----------------------------------------------------------------------
// Type definitions
/// Shared state threaded through all phases of the AST borrow checker
/// for a single function body.
pub struct BorrowckCtxt<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,

    // tables for the current thing we are checking; set to
    // Some in `borrowck_fn` and cleared later
    tables: &'a ty::TypeckTables<'tcx>,

    region_scope_tree: &'tcx region::ScopeTree,

    owner_def_id: DefId,

    body: &'tcx hir::Body,

    // Records whether any borrowck error was signalled for this body;
    // the flag is copied into the final `BorrowCheckResult`.
    signalled_any_error: Cell<SignalledError>,
}
impl<'a, 'tcx: 'a> BorrowckCtxt<'a, 'tcx> {
    /// Records that at least one borrowck error was reported for this
    /// body; the flag is later read into the `BorrowCheckResult`.
    fn signal_error(&self) {
        self.signalled_any_error.set(SignalledError::SawSomeError);
    }
}
///////////////////////////////////////////////////////////////////////////
// Loans and loan paths
/// Record of a loan that was issued.
pub struct Loan<'tcx> {
    // Presumably this loan's position within `all_loans`, assigned by
    // the gather phase -- TODO confirm in `gather_loans`.
    index: usize,
    // The path that was borrowed.
    loan_path: Rc<LoanPath<'tcx>>,
    kind: ty::BorrowKind,
    // Paths whose use is restricted while this loan is in scope.
    restricted_paths: Vec<Rc<LoanPath<'tcx>>>,

    /// gen_scope indicates where loan is introduced. Typically the
    /// loan is introduced at the point of the borrow, but in some
    /// cases, notably method arguments, the loan may be introduced
    /// only later, once it comes into scope. See also
    /// `GatherLoanCtxt::compute_gen_scope`.
    gen_scope: region::Scope,

    /// kill_scope indicates when the loan goes out of scope. This is
    /// either when the lifetime expires or when the local variable
    /// which roots the loan-path goes out of scope, whichever happens
    /// faster. See also `GatherLoanCtxt::compute_kill_scope`.
    kill_scope: region::Scope,
}
impl<'tcx> Loan<'tcx> {
    /// Returns a shared handle to the path this loan covers.
    pub fn loan_path(&self) -> Rc<LoanPath<'tcx>> {
        Rc::clone(&self.loan_path)
    }
}
/// A borrowable path: a local/upvar possibly extended by derefs,
/// field/element accesses, and enum downcasts.
///
/// Note: the `PartialEq` and `Hash` impls below compare only `kind`;
/// the `ty` field does not participate in identity.
#[derive(Eq)]
pub struct LoanPath<'tcx> {
    kind: LoanPathKind<'tcx>,
    ty: Ty<'tcx>,
}
impl<'tcx> PartialEq for LoanPath<'tcx> {
    /// Equality looks only at the path structure; the `ty` field is
    /// deliberately not compared.
    fn eq(&self, other: &LoanPath<'tcx>) -> bool {
        self.kind == other.kind
    }
}

impl<'tcx> Hash for LoanPath<'tcx> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        // Must stay in agreement with `PartialEq`, which compares
        // `kind` only.
        self.kind.hash(hasher);
    }
}
/// The structure of a `LoanPath`; the `README.md` examples referenced
/// below illustrate each shape.
#[derive(PartialEq, Eq, Hash, Debug)]
pub enum LoanPathKind<'tcx> {
    LpVar(hir::HirId),                          // `x` in README.md
    LpUpvar(ty::UpvarId),                       // `x` captured by-value into closure
    LpDowncast(Rc<LoanPath<'tcx>>, DefId),      // `x` downcast to particular enum variant
    LpExtend(Rc<LoanPath<'tcx>>, mc::MutabilityCategory, LoanPathElem<'tcx>)
}
impl<'tcx> LoanPath<'tcx> {
    /// Creates a loan path of the given `kind` carrying the type of the
    /// value it refers to.
    fn new(kind: LoanPathKind<'tcx>, ty: Ty<'tcx>) -> LoanPath<'tcx> {
        // Field-init shorthand instead of the redundant `kind: kind, ty: ty`.
        LoanPath { kind, ty }
    }

    /// Type of the value this path refers to.
    fn to_type(&self) -> Ty<'tcx> { self.ty }
}
// FIXME (pnkfelix): See discussion here
// https://github.com/pnkfelix/rust/commit/
// b2b39e8700e37ad32b486b9a8409b50a8a53aa51#commitcomment-7892003
/// Separator printed between a loan path and the enum variant it is
/// downcast to, e.g. `(lp as Variant)`.
///
/// The `'static` lifetime is elided: it is implied on `const` string
/// references and writing it out is redundant.
const DOWNCAST_PRINTED_OPERATOR: &str = " as ";
// A local, "cleaned" version of `mc::InteriorKind` that drops
// information that is not relevant to loan-path analysis. (In
// particular, the distinction between how precisely an array-element
// is tracked is irrelevant here.)
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum InteriorKind {
    /// Access to a named or numbered field.
    InteriorField(mc::FieldIndex),
    /// Access to some array/slice element; the index is deliberately
    /// not tracked (see the comment above).
    InteriorElement,
}
/// Conversion from the memory-categorization `InteriorKind` into the
/// "cleaned" local `InteriorKind` above.
trait ToInteriorKind { fn cleaned(self) -> InteriorKind; }

impl ToInteriorKind for mc::InteriorKind {
    fn cleaned(self) -> InteriorKind {
        match self {
            // Field accesses keep their identity ...
            mc::InteriorField(name) => InteriorField(name),
            // ... while all element accesses collapse to one variant.
            mc::InteriorElement(_) => InteriorElement,
        }
    }
}
// This can be:
// - a pointer dereference (`*P` in README.md)
// - a field reference, with an optional definition of the containing
// enum variant (`P.f` in README.md)
// `DefId` is present when the field is part of struct that is in
// a variant of an enum. For instance in:
// `enum E { X { foo: u32 }, Y { foo: u32 }}`
// each `foo` is qualified by the definition id of the variant (`X` or `Y`).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum LoanPathElem<'tcx> {
    /// A pointer dereference (`*P` in README.md).
    LpDeref(mc::PointerKind<'tcx>),
    /// A field/element access; the optional `DefId` names the enum
    /// variant containing the field (see the comment preceding this enum).
    LpInterior(Option<DefId>, InteriorKind),
}
/// Maps a closure's `LocalDefId` to the `HirId` of its body block.
/// ICEs if `closure_id` does not name a closure expression.
fn closure_to_block(closure_id: LocalDefId, tcx: TyCtxt<'_>) -> HirId {
    let hir_id = tcx.hir().local_def_id_to_hir_id(closure_id);
    if let Node::Expr(expr) = tcx.hir().get(hir_id) {
        if let hir::ExprKind::Closure(.., body_id, _, _) = expr.kind {
            body_id.hir_id
        } else {
            bug!("encountered non-closure id: {}", hir_id)
        }
    } else {
        bug!("encountered non-expr id: {}", hir_id)
    }
}
impl<'a, 'tcx> LoanPath<'tcx> {
    /// Scope at which a loan of this path is killed: the variable's
    /// scope for locals, the closure body for upvars, and the base
    /// path's kill scope for downcasts and extensions.
    pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::Scope {
        match self.kind {
            LpVar(hir_id) => {
                bccx.region_scope_tree.var_scope(hir_id.local_id)
            }
            LpUpvar(upvar_id) => {
                let block_id = closure_to_block(upvar_id.closure_expr_id, bccx.tcx);
                region::Scope { id: block_id.local_id, data: region::ScopeData::Node }
            }
            // Derived paths die when their base path dies.
            LpDowncast(ref base, _) |
            LpExtend(ref base, ..) => base.kill_scope(bccx),
        }
    }
}
// Avoid "cannot borrow immutable field `self.x` as mutable" as that implies that a field *can* be
// mutable independently of the struct it belongs to. (#35937)
/// Computes the loan path for `cmt` (if any), paired with a flag that
/// is `true` when the path involves a field/interior access (used to
/// tailor diagnostics -- see the comment above).
pub fn opt_loan_path_is_field<'tcx>(cmt: &mc::cmt_<'tcx>) -> (Option<Rc<LoanPath<'tcx>>>, bool) {
    let new_lp = |v: LoanPathKind<'tcx>| Rc::new(LoanPath::new(v, cmt.ty));

    match cmt.cat {
        // Rvalues, thread-locals and statics have no loan path.
        Categorization::Rvalue(..) |
        Categorization::ThreadLocal(..) |
        Categorization::StaticItem => {
            (None, false)
        }

        Categorization::Local(id) => {
            (Some(new_lp(LpVar(id))), false)
        }

        Categorization::Upvar(mc::Upvar { id, .. }) => {
            (Some(new_lp(LpUpvar(id))), false)
        }

        Categorization::Deref(ref cmt_base, pk) => {
            // A deref extends the base path and inherits its flag.
            let lp = opt_loan_path_is_field(cmt_base);
            (lp.0.map(|lp| {
                new_lp(LpExtend(lp, cmt.mutbl, LpDeref(pk)))
            }), lp.1)
        }

        Categorization::Interior(ref cmt_base, ik) => {
            // Field/element access: if the base is a downcast, record
            // the enum variant that owns the field.
            (opt_loan_path(cmt_base).map(|lp| {
                let opt_variant_id = match cmt_base.cat {
                    Categorization::Downcast(_, did) => Some(did),
                    _ => None
                };
                new_lp(LpExtend(lp, cmt.mutbl, LpInterior(opt_variant_id, ik.cleaned())))
            }), true)
        }

        Categorization::Downcast(ref cmt_base, variant_def_id) => {
            let lp = opt_loan_path_is_field(cmt_base);
            (lp.0.map(|lp| {
                new_lp(LpDowncast(lp, variant_def_id))
            }), lp.1)
        }
    }
}
/// Computes the `LoanPath` (if any) for a `cmt`.
/// Note that this logic is somewhat duplicated in
/// the method `compute()` found in `gather_loans::restrictions`,
/// which allows it to share common loan path pieces as it
/// traverses the CMT.
pub fn opt_loan_path<'tcx>(cmt: &mc::cmt_<'tcx>) -> Option<Rc<LoanPath<'tcx>>> {
    // Discard the "is a field" flag; only the path itself matters here.
    let (loan_path, _is_field) = opt_loan_path_is_field(cmt);
    loan_path
}
///////////////////////////////////////////////////////////////////////////
// Misc
impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
    /// Whether region `r_sub` is a subregion of `r_sup`, per the region
    /// relations for the current body owner.
    pub fn is_subregion_of(&self,
                           r_sub: ty::Region<'tcx>,
                           r_sup: ty::Region<'tcx>)
                           -> bool
    {
        let region_rels = RegionRelations::new(self.tcx,
                                               self.owner_def_id,
                                               &self.region_scope_tree,
                                               &self.tables.free_region_map);
        region_rels.is_subregion_of(r_sub, r_sup)
    }

    /// Renders `loan_path` into `out` for diagnostics, e.g.
    /// `(*p as Variant).field` or `v[..]`.
    pub fn append_loan_path_to_string(&self,
                                      loan_path: &LoanPath<'tcx>,
                                      out: &mut String) {
        match loan_path.kind {
            LpUpvar(ty::UpvarId { var_path: ty::UpvarPath { hir_id: id }, closure_expr_id: _ }) => {
                out.push_str(&self.tcx.hir().name(id).as_str());
            }
            LpVar(id) => {
                out.push_str(&self.tcx.hir().name(id).as_str());
            }

            LpDowncast(ref lp_base, variant_def_id) => {
                out.push('(');
                self.append_loan_path_to_string(&lp_base, out);
                out.push_str(DOWNCAST_PRINTED_OPERATOR);
                out.push_str(&self.tcx.def_path_str(variant_def_id));
                out.push(')');
            }

            LpExtend(ref lp_base, _, LpInterior(_, InteriorField(mc::FieldIndex(_, info)))) => {
                self.append_autoderefd_loan_path_to_string(&lp_base, out);
                out.push('.');
                out.push_str(&info.as_str());
            }

            LpExtend(ref lp_base, _, LpInterior(_, InteriorElement)) => {
                self.append_autoderefd_loan_path_to_string(&lp_base, out);
                out.push_str("[..]");
            }

            LpExtend(ref lp_base, _, LpDeref(_)) => {
                out.push('*');
                self.append_loan_path_to_string(&lp_base, out);
            }
        }
    }

    /// Like `append_loan_path_to_string`, but omits derefs that the
    /// user could omit thanks to autoderef (so `(*x).f` prints as `x.f`).
    pub fn append_autoderefd_loan_path_to_string(&self,
                                                 loan_path: &LoanPath<'tcx>,
                                                 out: &mut String) {
        match loan_path.kind {
            LpExtend(ref lp_base, _, LpDeref(_)) => {
                // For a path like `(*x).f` or `(*x)[3]`, autoderef
                // rules would normally allow users to omit the `*x`.
                // So just serialize such paths to `x.f` or x[3]` respectively.
                self.append_autoderefd_loan_path_to_string(&lp_base, out)
            }

            LpDowncast(ref lp_base, variant_def_id) => {
                out.push('(');
                self.append_autoderefd_loan_path_to_string(&lp_base, out);
                out.push_str(DOWNCAST_PRINTED_OPERATOR);
                out.push_str(&self.tcx.def_path_str(variant_def_id));
                out.push(')');
            }

            // All other shapes print the same with or without autoderef.
            LpVar(..) | LpUpvar(..) | LpExtend(.., LpInterior(..)) => {
                self.append_loan_path_to_string(loan_path, out)
            }
        }
    }

    /// Convenience wrapper: renders `loan_path` into a fresh `String`.
    pub fn loan_path_to_string(&self, loan_path: &LoanPath<'tcx>) -> String {
        let mut result = String::new();
        self.append_loan_path_to_string(loan_path, &mut result);
        result
    }

    /// Human-readable description of `cmt`, delegated to
    /// mem-categorization.
    pub fn cmt_to_cow_str(&self, cmt: &mc::cmt_<'tcx>) -> Cow<'static, str> {
        cmt.descriptive_string(self.tcx)
    }

    /// Backquoted loan path when `cmt` has one, otherwise its
    /// descriptive string.
    pub fn cmt_to_path_or_string(&self, cmt: &mc::cmt_<'tcx>) -> String {
        match opt_loan_path(cmt) {
            Some(lp) => format!("`{}`", self.loan_path_to_string(&lp)),
            None => self.cmt_to_cow_str(cmt).into_owned(),
        }
    }
}
/// Join for loan bits: union, since a loan is in scope at a node if it
/// is in scope on any predecessor.
impl BitwiseOperator for LoanDataFlowOperator {
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // loans from both preds are in scope
    }
}

/// Loans start out of scope everywhere.
impl DataFlowOperator for LoanDataFlowOperator {
    #[inline]
    fn initial_value(&self) -> bool {
        false // no loans in scope by default
    }
}
impl fmt::Debug for InteriorKind {
    /// Fields print as their name/index; elements print as `[]`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            InteriorField(mc::FieldIndex(_, info)) => write!(f, "{}", info),
            InteriorElement => write!(f, "[]"),
        }
    }
}
impl<'tcx> fmt::Debug for Loan<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Format: Loan_<index>(<path>, <kind>, <gen>-<kill>, <restricted paths>)
        write!(f, "Loan_{}({:?}, {:?}, {:?}-{:?}, {:?})",
               self.index,
               self.loan_path,
               self.kind,
               self.gen_scope,
               self.kill_scope,
               self.restricted_paths)
    }
}
/// Debug rendering of a loan path, using HIR node strings for variables
/// (e.g. `$(x).f.*`).
impl<'tcx> fmt::Debug for LoanPath<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.kind {
            LpVar(id) => {
                write!(f, "$({})", ty::tls::with(|tcx| tcx.hir().node_to_string(id)))
            }

            LpUpvar(ty::UpvarId{ var_path: ty::UpvarPath {hir_id: var_id}, closure_expr_id }) => {
                let s = ty::tls::with(|tcx| {
                    tcx.hir().node_to_string(var_id)
                });
                write!(f, "$({} captured by id={:?})", s, closure_expr_id)
            }

            LpDowncast(ref lp, variant_def_id) => {
                // Local variants get a readable def-path; foreign ones a raw DefId.
                let variant_str = if variant_def_id.is_local() {
                    ty::tls::with(|tcx| tcx.def_path_str(variant_def_id))
                } else {
                    format!("{:?}", variant_def_id)
                };
                write!(f, "({:?}{}{})", lp, DOWNCAST_PRINTED_OPERATOR, variant_str)
            }

            LpExtend(ref lp, _, LpDeref(_)) => {
                write!(f, "{:?}.*", lp)
            }

            LpExtend(ref lp, _, LpInterior(_, ref interior)) => {
                write!(f, "{:?}.{:?}", lp, interior)
            }
        }
    }
}
/// User-facing rendering of a loan path; differs from `Debug` mainly in
/// how variables and captured upvars are described.
impl<'tcx> fmt::Display for LoanPath<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.kind {
            LpVar(id) => {
                write!(f, "$({})", ty::tls::with(|tcx| tcx.hir().hir_to_user_string(id)))
            }

            LpUpvar(ty::UpvarId{ var_path: ty::UpvarPath { hir_id }, closure_expr_id: _ }) => {
                let s = ty::tls::with(|tcx| {
                    tcx.hir().node_to_string(hir_id)
                });
                write!(f, "$({} captured by closure)", s)
            }

            LpDowncast(ref lp, variant_def_id) => {
                // Local variants get a readable def-path; foreign ones a raw DefId.
                let variant_str = if variant_def_id.is_local() {
                    ty::tls::with(|tcx| tcx.def_path_str(variant_def_id))
                } else {
                    format!("{:?}", variant_def_id)
                };
                write!(f, "({}{}{})", lp, DOWNCAST_PRINTED_OPERATOR, variant_str)
            }

            LpExtend(ref lp, _, LpDeref(_)) => {
                write!(f, "{}.*", lp)
            }

            LpExtend(ref lp, _, LpInterior(_, ref interior)) => {
                write!(f, "{}.{:?}", lp, interior)
            }
        }
    }
}

View file

@ -1,730 +0,0 @@
//! Data structures used for tracking moves. Please see the extensive
//! comments in the section "Moves and initialization" in `README.md`.
use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom};
use crate::borrowck::*;
use crate::cfg;
use rustc::ty::{self, TyCtxt};
use rustc::util::nodemap::FxHashMap;
use std::cell::RefCell;
use std::rc::Rc;
use std::usize;
use syntax_pos::Span;
use rustc::hir;
use log::debug;
/// Tables tracking every move and assignment within one function body;
/// see the section "Moves and initialization" in `README.md`.
#[derive(Default)]
pub struct MoveData<'tcx> {
    /// Move paths. See section "Move paths" in `README.md`.
    pub paths: RefCell<Vec<MovePath<'tcx>>>,

    /// Cache of loan path to move path index, for easy lookup.
    pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,

    /// Each move or uninitialized variable gets an entry here.
    pub moves: RefCell<Vec<Move>>,

    /// Assignments to a variable, like `x = foo`. These are assigned
    /// bits for dataflow, since we must track them to ensure that
    /// immutable variables are assigned at most once along each path.
    pub var_assignments: RefCell<Vec<Assignment>>,

    /// Assignments to a path, like `x.f = foo`. These are not
    /// assigned dataflow bits, but we track them because they still
    /// kill move bits.
    pub path_assignments: RefCell<Vec<Assignment>>,
}

/// `MoveData` bundled with the propagated dataflow results for moves
/// and for variable assignments.
pub struct FlowedMoveData<'tcx> {
    pub move_data: MoveData<'tcx>,

    pub dfcx_moves: MoveDataFlow<'tcx>,

    // We could (and maybe should, for efficiency) combine both move
    // and assign data flow into one, but this way it's easier to
    // distinguish the bits that correspond to moves and assignments.
    pub dfcx_assign: AssignDataFlow<'tcx>,
}
/// Index into `MoveData.paths`, used like a pointer.
///
/// `Clone` is derived alongside `Copy` instead of the previous
/// hand-written `impl Clone`, which merely duplicated what the derive
/// produces for a `Copy` tuple struct.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);

impl MovePathIndex {
    /// Returns the raw index value.
    fn get(&self) -> usize {
        let MovePathIndex(v) = *self; v
    }
}
// Sentinel meaning "no move path" (list terminator / "null" parent).
// The non-UPPER_CASE name is historical, hence the lint allow.
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);

/// Index into `MoveData.moves`, used like a pointer
#[derive(Copy, Clone, PartialEq)]
pub struct MoveIndex(usize);

impl MoveIndex {
    /// Returns the raw index value.
    fn get(&self) -> usize {
        let MoveIndex(v) = *self; v
    }
}

// Sentinel meaning "no move"; see `InvalidMovePathIndex` above.
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
/// One node in the forest of move paths; parent/child/sibling links are
/// intrusive linked lists through the indices below.
pub struct MovePath<'tcx> {
    /// Loan path corresponding to this move path
    pub loan_path: Rc<LoanPath<'tcx>>,

    /// Parent pointer, `InvalidMovePathIndex` if root
    pub parent: MovePathIndex,

    /// Head of linked list of moves to this path,
    /// `InvalidMoveIndex` if not moved
    pub first_move: MoveIndex,

    /// First node in linked list of children, `InvalidMovePathIndex` if leaf
    pub first_child: MovePathIndex,

    /// Next node in linked list of parent's children (siblings),
    /// `InvalidMovePathIndex` if none.
    pub next_sibling: MovePathIndex,
}

/// Record of a single move out of some path.
#[derive(Copy, Clone)]
pub struct Move {
    /// Path being moved.
    pub path: MovePathIndex,

    /// ID of node that is doing the move.
    pub id: hir::ItemLocalId,

    /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
    pub next_move: MoveIndex
}

/// Record of a single assignment to some path.
#[derive(Copy, Clone)]
pub struct Assignment {
    /// Path being assigned.
    pub path: MovePathIndex,

    /// ID where assignment occurs
    pub id: hir::ItemLocalId,

    /// span of node where assignment occurs
    pub span: Span,
}

/// Dataflow operator for move bits (impls at the end of this file).
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;

pub type MoveDataFlow<'tcx> = DataFlowContext<'tcx, MoveDataFlowOperator>;

/// Dataflow operator for variable-assignment bits.
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;

pub type AssignDataFlow<'tcx> = DataFlowContext<'tcx, AssignDataFlowOperator>;
/// Whether `loan_path` refers to a unique memory location. Paths that
/// pass through an array/slice element are imprecise because element
/// indices are not tracked.
fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool {
    match loan_path.kind {
        LpVar(_) | LpUpvar(_) => {
            true
        }
        LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => {
            // Paths involving element accesses a[i] do not refer to a unique
            // location, as there is no accurate tracking of the indices.
            //
            // (Paths involving element accesses via slice pattern bindings
            // can in principle be tracked precisely, but that is future
            // work. For now, continue claiming that they are imprecise.)
            false
        }
        LpDowncast(ref lp_base, _) |
        LpExtend(ref lp_base, ..) => {
            // Precision is inherited from the base path.
            loan_path_is_precise(&lp_base)
        }
    }
}
impl MoveData<'tcx> {
    /// Returns `true` if there are no trackable assignments or moves
    /// in this move data -- that means that there is nothing that
    /// could cause a borrow error.
    pub fn is_empty(&self) -> bool {
        self.moves.borrow().is_empty() &&
            self.path_assignments.borrow().is_empty() &&
            self.var_assignments.borrow().is_empty()
    }

    /// Loan path stored at `index`.
    pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
        (*self.paths.borrow())[index.get()].loan_path.clone()
    }

    /// Parent of `index`, or `InvalidMovePathIndex` for roots.
    fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].parent
    }

    /// Head of the linked list of moves out of `index`.
    fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
        (*self.paths.borrow())[index.get()].first_move
    }

    /// Returns the index of first child, or `InvalidMovePathIndex` if
    /// `index` is leaf.
    fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].first_child
    }

    /// Next sibling of `index` in its parent's child list.
    fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].next_sibling
    }

    fn set_path_first_move(&self,
                           index: MovePathIndex,
                           first_move: MoveIndex) {
        (*self.paths.borrow_mut())[index.get()].first_move = first_move
    }

    fn set_path_first_child(&self,
                            index: MovePathIndex,
                            first_child: MovePathIndex) {
        (*self.paths.borrow_mut())[index.get()].first_child = first_child
    }

    fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
        //! Type safe indexing operator
        (*self.moves.borrow())[index.get()].next_move
    }

    fn is_var_path(&self, index: MovePathIndex) -> bool {
        //! True if `index` refers to a variable (i.e., a root path)
        self.path_parent(index) == InvalidMovePathIndex
    }

    /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
    /// `lp` and any of its base paths that do not yet have an index.
    pub fn move_path(&self, tcx: TyCtxt<'tcx>, lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
        if let Some(&index) = self.path_map.borrow().get(&lp) {
            return index;
        }

        let index = match lp.kind {
            LpVar(..) | LpUpvar(..) => {
                // Variables and upvars are roots: no parent, no siblings.
                let index = MovePathIndex(self.paths.borrow().len());

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: InvalidMovePathIndex,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: InvalidMovePathIndex,
                });

                index
            }

            LpDowncast(ref base, _) |
            LpExtend(ref base, ..) => {
                // Intern the base path first, then prepend the new node
                // to the parent's child list.
                let parent_index = self.move_path(tcx, base.clone());

                let index = MovePathIndex(self.paths.borrow().len());

                let next_sibling = self.path_first_child(parent_index);
                self.set_path_first_child(parent_index, index);

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: parent_index,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling,
                });

                index
            }
        };

        debug!("move_path(lp={:?}, index={:?})",
               lp,
               index);

        assert_eq!(index.get(), self.paths.borrow().len() - 1);
        self.path_map.borrow_mut().insert(lp, index);
        return index;
    }

    /// Move path index for `lp`, only if it was already interned.
    fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
                          -> Option<MovePathIndex> {
        self.path_map.borrow().get(lp).cloned()
    }

    /// Existing indices for `lp` and all of its base paths (may be empty).
    fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
                           -> Vec<MovePathIndex> {
        let mut result = vec![];
        self.add_existing_base_paths(lp, &mut result);
        result
    }

    /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
    /// does not add new move paths
    fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
                               result: &mut Vec<MovePathIndex>) {
        match self.path_map.borrow().get(lp).cloned() {
            Some(index) => {
                self.each_base_path(index, |p| {
                    result.push(p);
                    true
                });
            }
            None => {
                // `lp` itself is unknown; recurse into its base (if any).
                match lp.kind {
                    LpVar(..) | LpUpvar(..) => { }
                    LpDowncast(ref b, _) |
                    LpExtend(ref b, ..) => {
                        self.add_existing_base_paths(b, result);
                    }
                }
            }
        }
    }

    /// Adds a new move entry for a move of `lp` that occurs at location `id`.
    pub fn add_move(
        &self,
        tcx: TyCtxt<'tcx>,
        orig_lp: Rc<LoanPath<'tcx>>,
        id: hir::ItemLocalId,
    ) {
        // Moving one union field automatically moves all its fields. Also move siblings of
        // all parent union fields, moves do not propagate upwards automatically.
        let mut lp = orig_lp.clone();
        while let LpExtend(ref base_lp, mutbl, lp_elem) = lp.clone().kind {
            if let (&ty::Adt(adt_def, _), LpInterior(opt_variant_id, interior))
                    = (&base_lp.ty.kind, lp_elem) {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                        let field =
                            InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        if field != interior {
                            let sibling_lp_kind =
                                LpExtend(base_lp.clone(), mutbl, LpInterior(opt_variant_id, field));
                            // The sibling's type is irrelevant here.
                            let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, tcx.types.err));
                            self.add_move_helper(tcx, sibling_lp, id);
                        }
                    }
                }
            }
            lp = base_lp.clone();
        }

        self.add_move_helper(tcx, orig_lp, id);
    }

    /// Records a single move of `lp` at `id`, prepending it to the
    /// path's move list.
    fn add_move_helper(
        &self,
        tcx: TyCtxt<'tcx>,
        lp: Rc<LoanPath<'tcx>>,
        id: hir::ItemLocalId,
    ) {
        debug!("add_move(lp={:?}, id={:?})", lp, id);

        let path_index = self.move_path(tcx, lp);

        let move_index = MoveIndex(self.moves.borrow().len());

        let next_move = self.path_first_move(path_index);
        self.set_path_first_move(path_index, move_index);

        self.moves.borrow_mut().push(Move {
            path: path_index,
            id,
            next_move,
        });
    }

    /// Adds a new record for an assignment to `lp` that occurs at location `assign_id` with the
    /// given `span`.
    pub fn add_assignment(
        &self,
        tcx: TyCtxt<'tcx>,
        lp: Rc<LoanPath<'tcx>>,
        assign_id: hir::ItemLocalId,
        span: Span,
    ) {
        // Assigning to one union field automatically assigns to all its fields.
        if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
            if let ty::Adt(adt_def, _) = base_lp.ty.kind {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                        let field =
                            InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        let field_ty = if field == interior {
                            lp.ty
                        } else {
                            tcx.types.err // Doesn't matter
                        };
                        let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
                                                       LpInterior(opt_variant_id, field));
                        let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
                        self.add_assignment_helper(tcx, sibling_lp, assign_id,
                                                   span);
                    }
                    return;
                }
            }
        }

        self.add_assignment_helper(tcx, lp, assign_id, span);
    }

    /// Records a single assignment, routing it to `var_assignments` for
    /// root (variable) paths and to `path_assignments` otherwise.
    fn add_assignment_helper(
        &self,
        tcx: TyCtxt<'tcx>,
        lp: Rc<LoanPath<'tcx>>,
        assign_id: hir::ItemLocalId,
        span: Span,
    ) {
        debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id);

        let path_index = self.move_path(tcx, lp.clone());

        let assignment = Assignment {
            path: path_index,
            id: assign_id,
            span,
        };

        // Only variable assignments get dataflow bits; path assignments
        // are tracked just so they can kill move bits.
        if self.is_var_path(path_index) {
            debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
                   lp, self.var_assignments.borrow().len(), path_index);

            self.var_assignments.borrow_mut().push(assignment);
        } else {
            debug!("add_assignment[path](lp={:?}, path_index={:?})",
                   lp, path_index);

            self.path_assignments.borrow_mut().push(assignment);
        }
    }

    /// Adds the gen/kills for the various moves and
    /// assignments into the provided data flow contexts.
    /// Moves are generated by moves and killed by assignments and
    /// scoping. Assignments are generated by assignment to variables and
    /// killed by scoping. See `README.md` for more details.
    fn add_gen_kills(
        &self,
        bccx: &BorrowckCtxt<'_, 'tcx>,
        dfcx_moves: &mut MoveDataFlow<'_>,
        dfcx_assign: &mut AssignDataFlow<'_>,
    ) {
        for (i, the_move) in self.moves.borrow().iter().enumerate() {
            dfcx_moves.add_gen(the_move.id, i);
        }

        for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
            dfcx_assign.add_gen(assignment.id, i);
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);
        }

        for assignment in self.path_assignments.borrow().iter() {
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);
        }

        // Kill all moves related to a variable `x` when
        // it goes out of scope:
        for path in self.paths.borrow().iter() {
            match path.loan_path.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = path.loan_path.kill_scope(bccx);
                    let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
                    self.kill_moves(path, kill_scope.item_local_id(),
                                    KillFrom::ScopeEnd, dfcx_moves);
                }
                LpExtend(..) => {}
            }
        }

        // Kill all assignments when the variable goes out of scope:
        for (assignment_index, assignment) in
                self.var_assignments.borrow().iter().enumerate() {
            let lp = self.path_loan_path(assignment.path);
            match lp.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = lp.kill_scope(bccx);
                    dfcx_assign.add_kill(KillFrom::ScopeEnd,
                                         kill_scope.item_local_id(),
                                         assignment_index);
                }
                LpExtend(..) => {
                    bug!("var assignment for non var path");
                }
            }
        }
    }

    /// Walks from `index` up through its base paths to the root,
    /// stopping early (and returning `false`) if `f` returns `false`.
    fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        let mut p = index;
        while p != InvalidMovePathIndex {
            if !f(p) {
                return false;
            }
            p = self.path_parent(p);
        }
        return true;
    }

    // FIXME(#19596) This is a workaround, but there should be better way to do this
    fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        if !(*f)(index) {
            return false;
        }
        // Recurse through the intrusive child/sibling lists.
        let mut p = self.path_first_child(index);
        while p != InvalidMovePathIndex {
            if !self.each_extending_path_(p, f) {
                return false;
            }
            p = self.path_next_sibling(p);
        }
        return true;
    }

    /// Visits `index` and every path extending it (the whole subtree),
    /// stopping early if `f` returns `false`.
    fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        self.each_extending_path_(index, &mut f)
    }

    /// Visits every move of `index0` or of any path extending it.
    fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MoveIndex) -> bool,
    {
        let mut ret = true;
        self.each_extending_path(index0, |index| {
            let mut p = self.path_first_move(index);
            while p != InvalidMoveIndex {
                if !f(p) {
                    ret = false;
                    break;
                }
                p = self.move_next_move(p);
            }
            ret
        });
        ret
    }

    /// Registers kills at `kill_id` for every move applicable to
    /// `path`, provided the path refers to a unique location.
    fn kill_moves(
        &self,
        path: MovePathIndex,
        kill_id: hir::ItemLocalId,
        kill_kind: KillFrom,
        dfcx_moves: &mut MoveDataFlow<'_>,
    ) {
        // We can only perform kills for paths that refer to a unique location,
        // since otherwise we may kill a move from one location with an
        // assignment referring to another location.
        let loan_path = self.path_loan_path(path);
        if loan_path_is_precise(&loan_path) {
            self.each_applicable_move(path, |move_index| {
                debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}",
                       kill_kind, kill_id, move_index.get());
                dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
                true
            });
        }
    }
}
impl<'tcx> FlowedMoveData<'tcx> {
    /// Builds the move and assignment dataflow contexts for `move_data`
    /// over `cfg` and propagates them to a fixed point.
    pub fn new(
        move_data: MoveData<'tcx>,
        bccx: &BorrowckCtxt<'_, 'tcx>,
        cfg: &cfg::CFG,
        body: &hir::Body,
    ) -> FlowedMoveData<'tcx> {
        let tcx = bccx.tcx;

        let mut dfcx_moves =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_moves",
                                 Some(body),
                                 cfg,
                                 MoveDataFlowOperator,
                                 move_data.moves.borrow().len());
        let mut dfcx_assign =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_assigns",
                                 Some(body),
                                 cfg,
                                 AssignDataFlowOperator,
                                 move_data.var_assignments.borrow().len());

        move_data.add_gen_kills(bccx,
                                &mut dfcx_moves,
                                &mut dfcx_assign);

        dfcx_moves.add_kills_from_flow_exits(cfg);
        dfcx_assign.add_kills_from_flow_exits(cfg);

        dfcx_moves.propagate(cfg, body);
        dfcx_assign.propagate(cfg, body);

        FlowedMoveData {
            move_data,
            dfcx_moves,
            dfcx_assign,
        }
    }

    pub fn is_move_path(&self, id: hir::ItemLocalId, loan_path: &Rc<LoanPath<'tcx>>) -> bool {
        //! Returns `true` if node `id` itself moves `loan_path`
        //! (i.e., a gen bit for that path exists at `id`).
        let mut ret = false;

        if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
            self.dfcx_moves.each_gen_bit(id, |move_index| {
                let the_move = self.move_data.moves.borrow();
                let the_move = (*the_move)[move_index];
                if the_move.path == *loan_path_index {
                    // Found a move of exactly this path; stop iterating.
                    ret = true;
                    false
                } else {
                    true
                }
            });
        }

        ret
    }

    /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
    /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
    /// that would invalidate a reference to `loan_path` at location `id`.
    pub fn each_move_of<F>(&self,
                           id: hir::ItemLocalId,
                           loan_path: &Rc<LoanPath<'tcx>>,
                           mut f: F)
                           -> bool where
        F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
    {
        // Bad scenarios:
        //
        // 1. Move of `a.b.c`, use of `a.b.c`
        // 2. Move of `a.b.c`, use of `a.b.c.d`
        // 3. Move of `a.b.c`, use of `a` or `a.b`
        //
        // OK scenario:
        //
        // 4. move of `a.b.c`, use of `a.b.d`

        let base_indices = self.move_data.existing_base_paths(loan_path);
        if base_indices.is_empty() {
            // No interned paths at all: nothing can have been moved.
            return true;
        }

        let opt_loan_path_index = self.move_data.existing_move_path(loan_path);

        let mut ret = true;

        self.dfcx_moves.each_bit_on_entry(id, |index| {
            let the_move = self.move_data.moves.borrow();
            let the_move = &(*the_move)[index];
            let moved_path = the_move.path;
            if base_indices.iter().any(|x| x == &moved_path) {
                // Scenario 1 or 2: `loan_path` or some base path of
                // `loan_path` was moved.
                if !f(the_move, &self.move_data.path_loan_path(moved_path)) {
                    ret = false;
                }
            } else {
                if let Some(loan_path_index) = opt_loan_path_index {
                    let cont = self.move_data.each_base_path(moved_path, |p| {
                        if p == loan_path_index {
                            // Scenario 3: some extension of `loan_path`
                            // was moved
                            f(the_move,
                              &self.move_data.path_loan_path(moved_path))
                        } else {
                            true
                        }
                    });
                    if !cont { ret = false; }
                }
            }
            ret
        })
    }

    /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
    /// `loan_path` must be a single variable.
    pub fn each_assignment_of<F>(&self,
                                 id: hir::ItemLocalId,
                                 loan_path: &Rc<LoanPath<'tcx>>,
                                 mut f: F)
                                 -> bool where
        F: FnMut(&Assignment) -> bool,
    {
        let loan_path_index = {
            match self.move_data.existing_move_path(loan_path) {
                Some(i) => i,
                None => {
                    // if there were any assignments, it'd have an index
                    return true;
                }
            }
        };

        self.dfcx_assign.each_bit_on_entry(id, |index| {
            let assignment = self.move_data.var_assignments.borrow();
            let assignment = &(*assignment)[index];
            if assignment.path == loan_path_index && !f(assignment) {
                false
            } else {
                true
            }
        })
    }
}
/// Join for move bits: union, since a move reaching any predecessor
/// reaches the node.
impl BitwiseOperator for MoveDataFlowOperator {
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // moves from both preds are in scope
    }
}

impl DataFlowOperator for MoveDataFlowOperator {
    #[inline]
    fn initial_value(&self) -> bool {
        false // no moves in scope by default
    }
}

/// Join for assignment bits: union, same reasoning as for moves.
impl BitwiseOperator for AssignDataFlowOperator {
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // assignments from both preds are in scope
    }
}

impl DataFlowOperator for AssignDataFlowOperator {
    #[inline]
    fn initial_value(&self) -> bool {
        false // no assignments in scope by default
    }
}

View file

@ -1,545 +0,0 @@
use crate::cfg::*;
use rustc::hir::{self, PatKind};
use rustc::hir::def_id::DefId;
use rustc::hir::ptr::P;
use rustc::middle::region;
use rustc::ty::{self, TyCtxt};
use rustc_data_structures::graph::implementation as graph;
/// Temporary state used while lowering one body to a control-flow graph.
struct CFGBuilder<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    owner_def_id: DefId,
    tables: &'a ty::TypeckTables<'tcx>,
    graph: CFGGraph,
    // Target node for `return` expressions.
    fn_exit: CFGIndex,
    // Innermost-last stacks of enclosing breakable scopes.
    loop_scopes: Vec<LoopScope>,
    breakable_block_scopes: Vec<BlockScope>,
}

/// Scope of a block expression that `break` can target.
#[derive(Copy, Clone)]
struct BlockScope {
    block_expr_id: hir::ItemLocalId, // ID of breakable block expr node
    break_index: CFGIndex, // where to go on `break`
}

/// Scope of an enclosing loop, recording `continue`/`break` targets.
#[derive(Copy, Clone)]
struct LoopScope {
    loop_id: hir::ItemLocalId,     // ID of `loop`/`while` node
    continue_index: CFGIndex,      // where to go on a `loop`
    break_index: CFGIndex,         // where to go on a `break`
}
/// Builds the control-flow graph for `body`, returning it together with
/// its dedicated entry and exit nodes.
pub(super) fn construct(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG {
    let mut graph = graph::Graph::new();
    let entry = graph.add_node(CFGNodeData::Entry);

    // `fn_exit` is target of return exprs, which lies somewhere
    // outside input `body`. (Distinguishing `fn_exit` and `body_exit`
    // also resolves chicken-and-egg problem that arises if you try to
    // have return exprs jump to `body_exit` during construction.)
    let fn_exit = graph.add_node(CFGNodeData::Exit);

    // Find the tables for this body.
    let owner_def_id = tcx.hir().body_owner_def_id(body.id());

    let mut builder = CFGBuilder {
        tcx,
        owner_def_id,
        tables: tcx.typeck_tables_of(owner_def_id),
        graph,
        fn_exit,
        loop_scopes: Vec::new(),
        breakable_block_scopes: Vec::new(),
    };
    let body_exit = builder.expr(&body.value, entry);
    builder.add_contained_edge(body_exit, fn_exit);

    CFG {
        owner_def_id,
        graph: builder.graph,
        entry,
        exit: fn_exit,
    }
}
impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
    /// Adds the CFG for block `blk` starting from `pred`; returns the block's
    /// exit node. Blocks targeted by `break` get their exit node allocated up
    /// front so `break` edges have somewhere to land.
    fn block(&mut self, blk: &hir::Block, pred: CFGIndex) -> CFGIndex {
        if blk.targeted_by_break {
            let expr_exit = self.add_ast_node(blk.hir_id.local_id, &[]);
            self.breakable_block_scopes.push(BlockScope {
                block_expr_id: blk.hir_id.local_id,
                break_index: expr_exit,
            });
            let mut stmts_exit = pred;
            for stmt in &blk.stmts {
                stmts_exit = self.stmt(stmt, stmts_exit);
            }
            let blk_expr_exit = self.opt_expr(&blk.expr, stmts_exit);
            self.add_contained_edge(blk_expr_exit, expr_exit);
            self.breakable_block_scopes.pop();
            expr_exit
        } else {
            // Ordinary block: chain statements, then the trailing expression.
            let mut stmts_exit = pred;
            for stmt in &blk.stmts {
                stmts_exit = self.stmt(stmt, stmts_exit);
            }
            let expr_exit = self.opt_expr(&blk.expr, stmts_exit);
            self.add_ast_node(blk.hir_id.local_id, &[expr_exit])
        }
    }
    /// Adds the CFG for one statement; returns the statement's exit node.
    fn stmt(&mut self, stmt: &hir::Stmt, pred: CFGIndex) -> CFGIndex {
        let exit = match stmt.kind {
            hir::StmtKind::Local(ref local) => {
                // `let pat = init;`: evaluate the initializer, then bind.
                let init_exit = self.opt_expr(&local.init, pred);
                self.pat(&local.pat, init_exit)
            }
            // Nested items contribute no control flow to this body.
            hir::StmtKind::Item(_) => pred,
            hir::StmtKind::Expr(ref expr) |
            hir::StmtKind::Semi(ref expr) => {
                self.expr(&expr, pred)
            }
        };
        self.add_ast_node(stmt.hir_id.local_id, &[exit])
    }
    /// Adds the CFG for matching pattern `pat`; returns the pattern's exit.
    fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
        match pat.kind {
            // Leaf patterns: a single node hanging off `pred`.
            PatKind::Binding(.., None) |
            PatKind::Path(_) |
            PatKind::Lit(..) |
            PatKind::Range(..) |
            PatKind::Wild => self.add_ast_node(pat.hir_id.local_id, &[pred]),
            // Single-subpattern wrappers: visit the subpattern first.
            PatKind::Box(ref subpat) |
            PatKind::Ref(ref subpat, _) |
            PatKind::Binding(.., Some(ref subpat)) => {
                let subpat_exit = self.pat(&subpat, pred);
                self.add_ast_node(pat.hir_id.local_id, &[subpat_exit])
            }
            // All subpatterns must match, in order.
            PatKind::TupleStruct(_, ref subpats, _) |
            PatKind::Tuple(ref subpats, _) => {
                let pats_exit = self.pats_all(subpats.iter(), pred);
                self.add_ast_node(pat.hir_id.local_id, &[pats_exit])
            }
            PatKind::Struct(_, ref subpats, _) => {
                let pats_exit = self.pats_all(subpats.iter().map(|f| &f.pat), pred);
                self.add_ast_node(pat.hir_id.local_id, &[pats_exit])
            }
            // `p1 | p2 | ...`: each alternative is a separate branch into the
            // pattern's exit node.
            PatKind::Or(ref pats) => {
                let branches: Vec<_> = pats.iter().map(|p| self.pat(p, pred)).collect();
                self.add_ast_node(pat.hir_id.local_id, &branches)
            }
            // Slice pattern: prefix, then the middle binding, then suffix.
            PatKind::Slice(ref pre, ref vec, ref post) => {
                let pre_exit = self.pats_all(pre.iter(), pred);
                let vec_exit = self.pats_all(vec.iter(), pre_exit);
                let post_exit = self.pats_all(post.iter(), vec_exit);
                self.add_ast_node(pat.hir_id.local_id, &[post_exit])
            }
        }
    }
    /// Handles case where all of the patterns must match.
    fn pats_all<'b, I: Iterator<Item = &'b P<hir::Pat>>>(
        &mut self,
        pats: I,
        pred: CFGIndex,
    ) -> CFGIndex {
        pats.fold(pred, |pred, pat| self.pat(&pat, pred))
    }
    /// Adds the CFG for expression `expr` starting from `pred`; returns the
    /// expression's exit node. This is the main dispatch for CFG shapes.
    fn expr(&mut self, expr: &hir::Expr, pred: CFGIndex) -> CFGIndex {
        match expr.kind {
            hir::ExprKind::Block(ref blk, _) => {
                let blk_exit = self.block(&blk, pred);
                self.add_ast_node(expr.hir_id.local_id, &[blk_exit])
            }
            hir::ExprKind::Loop(ref body, _, _) => {
                //
                //           [pred]
                //             |
                //             v 1
                //         [loopback] <---+
                //             |      4   |
                //             v 3        |
                //           [body] ------+
                //
                //       [expr] 2
                //
                // Note that `break` and `loop` statements
                // may cause additional edges.
                let loopback = self.add_dummy_node(&[pred]);              // 1
                let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]);  // 2
                self.loop_scopes.push(LoopScope {
                    loop_id: expr.hir_id.local_id,
                    continue_index: loopback,
                    break_index: expr_exit,
                });
                let body_exit = self.block(&body, loopback);           // 3
                self.add_contained_edge(body_exit, loopback);          // 4
                self.loop_scopes.pop();
                expr_exit
            }
            hir::ExprKind::Match(ref discr, ref arms, _) => {
                self.match_(expr.hir_id.local_id, &discr, &arms, pred)
            }
            hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
                // Short-circuiting `&&` / `||`: `r` may be skipped entirely.
                //
                //     [pred]
                //       |
                //       v 1
                //      [l]
                //       |
                //      / \
                //     /   \
                //    v 2  *
                //   [r]   |
                //    |    |
                //    v 3  v 4
                //   [..exit..]
                //
                let l_exit = self.expr(&l, pred);                      // 1
                let r_exit = self.expr(&r, l_exit);                    // 2
                self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit])      // 3,4
            }
            hir::ExprKind::Ret(ref v) => {
                // `return v`: edge straight to `fn_exit`; everything after
                // the `return` is unreachable.
                let v_exit = self.opt_expr(v, pred);
                let b = self.add_ast_node(expr.hir_id.local_id, &[v_exit]);
                self.add_returning_edge(expr, b);
                self.add_unreachable_node()
            }
            hir::ExprKind::Break(destination, ref opt_expr) => {
                // `break 'label v`: edge to the target scope's break node.
                let v = self.opt_expr(opt_expr, pred);
                let (target_scope, break_dest) =
                    self.find_scope_edge(expr, destination, ScopeCfKind::Break);
                let b = self.add_ast_node(expr.hir_id.local_id, &[v]);
                self.add_exiting_edge(expr, b, target_scope, break_dest);
                self.add_unreachable_node()
            }
            hir::ExprKind::Continue(destination) => {
                // `continue 'label`: edge back to the loop's loopback node.
                let (target_scope, cont_dest) =
                    self.find_scope_edge(expr, destination, ScopeCfKind::Continue);
                let a = self.add_ast_node(expr.hir_id.local_id, &[pred]);
                self.add_exiting_edge(expr, a, target_scope, cont_dest);
                self.add_unreachable_node()
            }
            hir::ExprKind::Array(ref elems) => {
                self.straightline(expr, pred, elems.iter().map(|e| &*e))
            }
            hir::ExprKind::Call(ref func, ref args) => {
                self.call(expr, pred, &func, args.iter().map(|e| &*e))
            }
            hir::ExprKind::MethodCall(.., ref args) => {
                // Receiver is `args[0]`; remaining args follow it.
                self.call(expr, pred, &args[0], args[1..].iter().map(|e| &*e))
            }
            // Overloaded index / binary operators are really method calls.
            hir::ExprKind::Index(ref l, ref r) |
            hir::ExprKind::Binary(_, ref l, ref r) if self.tables.is_method_call(expr) => {
                self.call(expr, pred, &l, Some(&**r).into_iter())
            }
            hir::ExprKind::Unary(_, ref e) if self.tables.is_method_call(expr) => {
                self.call(expr, pred, &e, None::<hir::Expr>.iter())
            }
            hir::ExprKind::Tup(ref exprs) => {
                self.straightline(expr, pred, exprs.iter().map(|e| &*e))
            }
            hir::ExprKind::Struct(_, ref fields, ref base) => {
                // Fields first, then the `..base` expression, if any.
                let field_cfg = self.straightline(expr, pred, fields.iter().map(|f| &*f.expr));
                self.opt_expr(base, field_cfg)
            }
            hir::ExprKind::Assign(ref l, ref r) |
            hir::ExprKind::AssignOp(_, ref l, ref r) => {
                // NB: for assignments the RHS is evaluated before the LHS.
                self.straightline(expr, pred, [r, l].iter().map(|&e| &**e))
            }
            hir::ExprKind::Index(ref l, ref r) |
            hir::ExprKind::Binary(_, ref l, ref r) => { // N.B., && and || handled earlier
                self.straightline(expr, pred, [l, r].iter().map(|&e| &**e))
            }
            // All single-subexpression wrappers: straight-line flow.
            hir::ExprKind::Box(ref e) |
            hir::ExprKind::AddrOf(_, ref e) |
            hir::ExprKind::Cast(ref e, _) |
            hir::ExprKind::Type(ref e, _) |
            hir::ExprKind::DropTemps(ref e) |
            hir::ExprKind::Unary(_, ref e) |
            hir::ExprKind::Field(ref e, _) |
            hir::ExprKind::Yield(ref e, _) |
            hir::ExprKind::Repeat(ref e, _) => {
                self.straightline(expr, pred, Some(&**e).into_iter())
            }
            hir::ExprKind::InlineAsm(_, ref outputs, ref inputs) => {
                let post_outputs = self.exprs(outputs.iter().map(|e| &*e), pred);
                let post_inputs = self.exprs(inputs.iter().map(|e| &*e), post_outputs);
                self.add_ast_node(expr.hir_id.local_id, &[post_inputs])
            }
            // Leaves (closure bodies are separate bodies with their own CFG).
            hir::ExprKind::Closure(..) |
            hir::ExprKind::Lit(..) |
            hir::ExprKind::Path(_) |
            hir::ExprKind::Err => {
                self.straightline(expr, pred, None::<hir::Expr>.iter())
            }
        }
    }
    /// Adds the CFG for a call: callee/receiver, then arguments, then the
    /// call node itself. Calls returning an uninhabited type diverge.
    fn call<'b, I: Iterator<Item = &'b hir::Expr>>(
        &mut self,
        call_expr: &hir::Expr,
        pred: CFGIndex,
        func_or_rcvr: &hir::Expr,
        args: I,
    ) -> CFGIndex {
        let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
        let ret = self.straightline(call_expr, func_or_rcvr_exit, args);
        let m = self.tcx.hir().get_module_parent(call_expr.hir_id);
        if self.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(call_expr)) {
            // E.g. a call returning `!` never returns normally.
            self.add_unreachable_node()
        } else {
            ret
        }
    }
    /// Constructs graph for `exprs` evaluated in order.
    fn exprs<'b, I: Iterator<Item = &'b hir::Expr>>(
        &mut self,
        exprs: I,
        pred: CFGIndex,
    ) -> CFGIndex {
        exprs.fold(pred, |p, e| self.expr(e, p))
    }
    /// Constructs graph for `opt_expr` evaluated, if `Some`.
    fn opt_expr(
        &mut self,
        opt_expr: &Option<P<hir::Expr>>,
        pred: CFGIndex,
    ) -> CFGIndex {
        opt_expr.iter().fold(pred, |p, e| self.expr(&e, p))
    }
    /// Handles case of an expression that evaluates `subexprs` in order.
    fn straightline<'b, I: Iterator<Item = &'b hir::Expr>>(
        &mut self,
        expr: &hir::Expr,
        pred: CFGIndex,
        subexprs: I,
    ) -> CFGIndex {
        let subexprs_exit = self.exprs(subexprs, pred);
        self.add_ast_node(expr.hir_id.local_id, &[subexprs_exit])
    }
    /// Adds the CFG for a `match` expression; returns the match's exit node.
    fn match_(&mut self, id: hir::ItemLocalId, discr: &hir::Expr,
              arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex {
        // The CFG for match expressions is quite complex, so no ASCII
        // art for it (yet).
        //
        // The CFG generated below matches roughly what MIR contains.
        // Each pattern and guard is visited in parallel, with
        // arms containing multiple patterns generating multiple nodes
        // for the same guard expression. The guard expressions chain
        // into each other from top to bottom, with a specific
        // exception to allow some additional valid programs
        // (explained below). MIR differs slightly in that the
        // pattern matching may continue after a guard but the visible
        // behaviour should be the same.
        //
        // What is going on is explained in further comments.
        // Visit the discriminant expression.
        let discr_exit = self.expr(discr, pred);
        // Add a node for the exit of the match expression as a whole.
        let expr_exit = self.add_ast_node(id, &[]);
        // Keep track of the previous guard expressions.
        let mut prev_guard = None;
        let match_scope = region::Scope { id, data: region::ScopeData::Node };
        for arm in arms {
            // Add an exit node for when we've visited all the
            // patterns and the guard (if there is one) in the arm.
            let bindings_exit = self.add_dummy_node(&[]);
            for pat in arm.top_pats_hack() {
                // Visit the pattern, coming from the discriminant exit
                let mut pat_exit = self.pat(&pat, discr_exit);
                // If there is a guard expression, handle it here.
                if let Some(ref guard) = arm.guard {
                    // Add a dummy node for the previous guard
                    // expression to target.
                    let guard_start = self.add_dummy_node(&[pat_exit]);
                    // Visit the guard expression.
                    let guard_exit = match guard {
                        hir::Guard::If(ref e) => (&**e, self.expr(e, guard_start)),
                    };
                    // #47295: We used to have very special case code
                    // here for when a pair of arms are both formed
                    // solely from constants, and if so, not add these
                    // edges.  But this was not actually sound without
                    // other constraints that we stopped enforcing at
                    // some point.
                    if let Some((prev_guard, prev_index)) = prev_guard.take() {
                        self.add_exiting_edge(prev_guard, prev_index, match_scope, guard_start);
                    }
                    // Push the guard onto the list of previous guards.
                    prev_guard = Some(guard_exit);
                    // Update the exit node for the pattern.
                    pat_exit = guard_exit.1;
                }
                // Add an edge from the exit of this pattern to the exit of the arm.
                self.add_contained_edge(pat_exit, bindings_exit);
            }
            // Visit the body of this arm.
            let body_exit = self.expr(&arm.body, bindings_exit);
            let arm_exit = self.add_ast_node(arm.hir_id.local_id, &[body_exit]);
            // Link the body to the exit of the expression.
            self.add_contained_edge(arm_exit, expr_exit);
        }
        expr_exit
    }
    /// Adds an anonymous helper node with the given predecessors.
    fn add_dummy_node(&mut self, preds: &[CFGIndex]) -> CFGIndex {
        self.add_node(CFGNodeData::Dummy, preds)
    }
    /// Adds a node tied to the HIR node `id`, with the given predecessors.
    fn add_ast_node(&mut self, id: hir::ItemLocalId, preds: &[CFGIndex]) -> CFGIndex {
        self.add_node(CFGNodeData::AST(id), preds)
    }
    /// Adds a node with no predecessors, for code after a diverging expr.
    fn add_unreachable_node(&mut self) -> CFGIndex {
        self.add_node(CFGNodeData::Unreachable, &[])
    }
    /// Adds a node and connects each `preds` entry to it with a plain edge.
    fn add_node(&mut self, data: CFGNodeData, preds: &[CFGIndex]) -> CFGIndex {
        let node = self.graph.add_node(data);
        for &pred in preds {
            self.add_contained_edge(pred, node);
        }
        node
    }
    /// Adds an ordinary edge: control flow that exits no scopes.
    fn add_contained_edge(
        &mut self,
        source: CFGIndex,
        target: CFGIndex,
    ) {
        let data = CFGEdgeData {exiting_scopes: vec![] };
        self.graph.add_edge(source, target, data);
    }
    /// Adds a `break`/`continue` edge, recording every region scope crossed
    /// between the jump expression and `target_scope`.
    fn add_exiting_edge(
        &mut self,
        from_expr: &hir::Expr,
        from_index: CFGIndex,
        target_scope: region::Scope,
        to_index: CFGIndex,
    ) {
        let mut data = CFGEdgeData { exiting_scopes: vec![] };
        let mut scope = region::Scope {
            id: from_expr.hir_id.local_id,
            data: region::ScopeData::Node
        };
        let region_scope_tree = self.tcx.region_scope_tree(self.owner_def_id);
        // Walk outward through enclosing scopes until we hit the target.
        while scope != target_scope {
            data.exiting_scopes.push(scope.item_local_id());
            scope = region_scope_tree.encl_scope(scope);
        }
        self.graph.add_edge(from_index, to_index, data);
    }
    /// Adds a `return` edge to `fn_exit`, exiting every enclosing loop.
    fn add_returning_edge(
        &mut self,
        _from_expr: &hir::Expr,
        from_index: CFGIndex,
    ) {
        let data = CFGEdgeData {
            exiting_scopes: self.loop_scopes.iter()
                                            .rev()
                                            .map(|&LoopScope { loop_id: id, .. }| id)
                                            .collect()
        };
        self.graph.add_edge(from_index, self.fn_exit, data);
    }
    /// Resolves a `break`/`continue` destination to the enclosing scope and
    /// the CFG node the jump should target.
    fn find_scope_edge(
        &self,
        expr: &hir::Expr,
        destination: hir::Destination,
        scope_cf_kind: ScopeCfKind,
    ) -> (region::Scope, CFGIndex) {
        match destination.target_id {
            Ok(loop_id) => {
                // Check breakable blocks first, then loops.
                for b in &self.breakable_block_scopes {
                    if b.block_expr_id == loop_id.local_id {
                        let scope = region::Scope {
                            id: loop_id.local_id,
                            data: region::ScopeData::Node
                        };
                        return (scope, match scope_cf_kind {
                            ScopeCfKind::Break => b.break_index,
                            ScopeCfKind::Continue => bug!("can't continue to block"),
                        });
                    }
                }
                for l in &self.loop_scopes {
                    if l.loop_id == loop_id.local_id {
                        let scope = region::Scope {
                            id: loop_id.local_id,
                            data: region::ScopeData::Node
                        };
                        return (scope, match scope_cf_kind {
                            ScopeCfKind::Break => l.break_index,
                            ScopeCfKind::Continue => l.continue_index,
                        });
                    }
                }
                span_bug!(expr.span, "no scope for ID {}", loop_id);
            }
            Err(err) => span_bug!(expr.span, "scope error: {}",  err),
        }
    }
}
/// Which kind of scope-exiting control flow an edge represents.
#[derive(Copy, Clone, Eq, PartialEq)]
enum ScopeCfKind {
    Break,
    Continue,
}

View file

@ -1,119 +0,0 @@
/// This module provides linkage between `rustc::middle::graph` and
/// libgraphviz traits.
use crate::cfg;
use rustc::hir;
use rustc::ty::TyCtxt;
// Graphviz node: a CFG node index paired with a borrow of its data.
pub(crate) type Node<'a> = (cfg::CFGIndex, &'a cfg::CFGNode);
// Graphviz edge: a borrow of a CFG edge.
pub(crate) type Edge<'a> = &'a cfg::CFGEdge;
/// A CFG together with everything needed to render it as labelled graphviz.
pub struct LabelledCFG<'a, 'tcx> {
    pub tcx: TyCtxt<'tcx>,
    pub cfg: &'a cfg::CFG,
    // Graph name used as the dot `digraph` identifier.
    pub name: String,
    /// `labelled_edges` controls whether we emit labels on the edges.
    pub labelled_edges: bool,
}
impl<'a, 'tcx> LabelledCFG<'a, 'tcx> {
    /// Pretty-prints the HIR node behind `local_id` for use as a dot label,
    /// rewriting newlines so multi-line output renders left-aligned.
    fn local_id_to_string(&self, local_id: hir::ItemLocalId) -> String {
        assert!(self.cfg.owner_def_id.is_local());
        let hir_id = hir::HirId {
            owner: self.tcx.hir().def_index_to_hir_id(self.cfg.owner_def_id.index).owner,
            local_id
        };
        let s = self.tcx.hir().node_to_string(hir_id);
        if !s.contains("\n") {
            return s;
        }
        // Replacing newlines with `\\l` causes each line to be left-aligned,
        // improving presentation of (long) pretty-printed expressions.
        let mut s = s.replace("\n", "\\l");
        // Left-alignment applies to the line that *precedes* `\l`, not the
        // one that follows, so make sure the string ends with `\l` to keep
        // the final line aligned as well.
        if !s.ends_with("\\l") {
            s.push_str("\\l");
        }
        s
    }
}
impl<'a, 'hir> dot::Labeller<'a> for LabelledCFG<'a, 'hir> {
    type Node = Node<'a>;
    type Edge = Edge<'a>;
    fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[..]).unwrap() }
    /// Node IDs are `N<index>`, always a valid dot identifier.
    fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> {
        dot::Id::new(format!("N{}", i.node_id())).unwrap()
    }
    /// Labels entry/exit/dummy nodes specially; otherwise pretty-prints the
    /// underlying HIR node.
    fn node_label(&'a self, &(i, n): &Node<'a>) -> dot::LabelText<'a> {
        if i == self.cfg.entry {
            dot::LabelText::LabelStr("entry".into())
        } else if i == self.cfg.exit {
            dot::LabelText::LabelStr("exit".into())
        } else if n.data.id() == hir::DUMMY_ITEM_LOCAL_ID {
            dot::LabelText::LabelStr("(dummy_node)".into())
        } else {
            let s = self.local_id_to_string(n.data.id());
            dot::LabelText::EscStr(s.into())
        }
    }
    /// When `labelled_edges` is set, lists each scope the edge exits.
    fn edge_label(&self, e: &Edge<'a>) -> dot::LabelText<'a> {
        let mut label = String::new();
        if !self.labelled_edges {
            return dot::LabelText::EscStr(label.into());
        }
        let mut put_one = false;
        for (i, &id) in e.data.exiting_scopes.iter().enumerate() {
            if put_one {
                label.push_str(",\\l");
            } else {
                put_one = true;
            }
            let s = self.local_id_to_string(id);
            label.push_str(&format!("exiting scope_{} {}",
                                    i,
                                    &s[..]));
        }
        dot::LabelText::EscStr(label.into())
    }
}
impl<'a> dot::GraphWalk<'a> for &'a cfg::CFG {
    type Node = Node<'a>;
    type Edge = Edge<'a>;
    /// Every node of the underlying graph, paired with its index.
    fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> {
        self.graph.enumerated_nodes().collect::<Vec<_>>().into()
    }
    /// Every edge of the underlying graph.
    fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> {
        self.graph.all_edges().iter().collect()
    }
    fn source(&'a self, edge: &Edge<'a>) -> Node<'a> {
        let idx = edge.source();
        (idx, self.graph.node(idx))
    }
    fn target(&'a self, edge: &Edge<'a>) -> Node<'a> {
        let idx = edge.target();
        (idx, self.graph.node(idx))
    }
}
// `LabelledCFG` walks the graph by delegating to the wrapped `CFG`.
impl<'a, 'hir> dot::GraphWalk<'a> for LabelledCFG<'a, 'hir> {
    type Node = Node<'a>;
    type Edge = Edge<'a>;
    fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.cfg.nodes() }
    fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.cfg.edges() }
    fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.cfg.source(edge) }
    fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.cfg.target(edge) }
}

View file

@ -1,55 +0,0 @@
//! Module that constructs a control-flow graph representing an item.
//! Uses `Graph` as the underlying representation.
use rustc_data_structures::graph::implementation as graph;
use rustc::ty::TyCtxt;
use rustc::hir;
use rustc::hir::def_id::DefId;
mod construct;
pub mod graphviz;
/// A control-flow graph for one item's body.
pub struct CFG {
    owner_def_id: DefId,          // item whose body this CFG describes
    pub(crate) graph: CFGGraph,   // the underlying graph
    pub(crate) entry: CFGIndex,   // entry node
    exit: CFGIndex,               // exit node (target of `return`)
}
/// Payload of a CFG node.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum CFGNodeData {
    AST(hir::ItemLocalId), // corresponds to a HIR node
    Entry,                 // function entry
    Exit,                  // function exit
    Dummy,                 // synthetic helper node
    Unreachable,           // code after a diverging expression
}
impl CFGNodeData {
    /// Returns the HIR-local ID for `AST` nodes; every other node kind maps
    /// to the shared dummy ID.
    pub(crate) fn id(&self) -> hir::ItemLocalId {
        match *self {
            CFGNodeData::AST(id) => id,
            _ => hir::DUMMY_ITEM_LOCAL_ID,
        }
    }
}
/// Payload of a CFG edge: the region scopes a jump exits on this edge.
#[derive(Debug)]
pub struct CFGEdgeData {
    pub(crate) exiting_scopes: Vec<hir::ItemLocalId>
}
// Convenience aliases over the generic graph implementation.
pub(crate) type CFGIndex = graph::NodeIndex;
pub(crate) type CFGGraph = graph::Graph<CFGNodeData, CFGEdgeData>;
pub(crate) type CFGNode = graph::Node<CFGNodeData>;
pub(crate) type CFGEdge = graph::Edge<CFGEdgeData>;
impl CFG {
    /// Constructs the control-flow graph for `body`.
    pub fn new(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG {
        construct::construct(tcx, body)
    }
}

View file

@ -1,672 +0,0 @@
//! A module for propagating forward dataflow information. The analysis
//! assumes that the items to be propagated can be represented as bits
//! and thus uses bitvectors. Your job is simply to specify the so-called
//! GEN and KILL bits for each expression.
use crate::cfg::{self, CFGIndex};
use std::mem;
use std::usize;
use log::debug;
use rustc_data_structures::graph::implementation::OUTGOING;
use rustc::util::nodemap::FxHashMap;
use rustc::hir;
use rustc::hir::intravisit;
use rustc::hir::print as pprust;
use rustc::ty::TyCtxt;
/// Selects which side of a CFG node a bit-set query refers to.
#[derive(Copy, Clone, Debug)]
pub enum EntryOrExit {
    Entry,
    Exit,
}
/// State for a forward bit-vector dataflow analysis over a CFG.
#[derive(Clone)]
pub struct DataFlowContext<'tcx, O> {
    tcx: TyCtxt<'tcx>,
    /// a name for the analysis using this dataflow instance
    analysis_name: &'static str,
    /// the data flow operator
    oper: O,
    /// number of bits to propagate per id
    bits_per_id: usize,
    /// number of words we will use to store bits_per_id.
    /// equal to bits_per_id/usize::BITS rounded up.
    words_per_id: usize,
    // mapping from node to cfg node index
    // FIXME (#6298): Shouldn't this go with CFG?
    local_id_to_index: FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,
    // Bit sets per cfg node.  The following three fields (`gens`, `kills`,
    // and `on_entry`) all have the same structure. For each id in
    // `id_range`, there is a range of words equal to `words_per_id`.
    // So, to access the bits for any given id, you take a slice of
    // the full vector (see the method `compute_id_range()`).
    /// bits generated as we exit the cfg node. Updated by `add_gen()`.
    gens: Vec<usize>,
    /// bits killed as we exit the cfg node, or non-locally jump over
    /// it. Updated by `add_kill(KillFrom::ScopeEnd)`.
    scope_kills: Vec<usize>,
    /// bits killed as we exit the cfg node directly; if it is jumped
    /// over, e.g., via `break`, the kills are not reflected in the
    /// jump's effects. Updated by `add_kill(KillFrom::Execution)`.
    action_kills: Vec<usize>,
    /// bits that are valid on entry to the cfg node. Updated by
    /// `propagate()`.
    on_entry: Vec<usize>,
}
/// How two incoming bit-words are merged at a CFG join point.
pub trait BitwiseOperator {
    /// Joins two predecessor bits together, typically either `|` or `&`
    fn join(&self, succ: usize, pred: usize) -> usize;
}
/// Parameterization for the precise form of data flow that is used.
pub trait DataFlowOperator : BitwiseOperator {
/// Specifies the initial value for each bit in the `on_entry` set
fn initial_value(&self) -> bool;
}
/// Mutable state threaded through one fixed-point iteration of `propagate()`.
struct PropagationContext<'a, 'tcx, O> {
    dfcx: &'a mut DataFlowContext<'tcx, O>,
    // Set whenever a node's entry set changes; iteration stops when false.
    changed: bool,
}
/// Looks up the CFG node indices recorded for `id`; ids that never made it
/// into the index yield an empty slice.
fn get_cfg_indices(id: hir::ItemLocalId,
                   index: &FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>)
                   -> &[CFGIndex] {
    match index.get(&id) {
        Some(indices) => indices,
        None => &[],
    }
}
impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> {
    /// Returns true if `n` has an associated bit set in this analysis
    /// (i.e., it appears in the id-to-CFG-node index).
    fn has_bitset_for_local_id(&self, n: hir::ItemLocalId) -> bool {
        assert!(n != hir::DUMMY_ITEM_LOCAL_ID);
        self.local_id_to_index.contains_key(&n)
    }
}
// Pretty-printer annotation: interleaves the computed bit sets with the
// pretty-printed body, for `-Z`-style debugging dumps.
impl<'tcx, O: DataFlowOperator> pprust::PpAnn for DataFlowContext<'tcx, O> {
    fn nested(&self, state: &mut pprust::State<'_>, nested: pprust::Nested) {
        pprust::PpAnn::nested(self.tcx.hir(), state, nested)
    }
    /// Before printing a node, emit a synthesized comment showing its
    /// on-entry, gen, and kill sets (omitting sets that are all zero).
    fn pre(&self,
           ps: &mut pprust::State<'_>,
           node: pprust::AnnNode<'_>) {
        let id = match node {
            pprust::AnnNode::Name(_) => return,
            pprust::AnnNode::Expr(expr) => expr.hir_id.local_id,
            pprust::AnnNode::Block(blk) => blk.hir_id.local_id,
            pprust::AnnNode::Item(_) |
            pprust::AnnNode::SubItem(_) => return,
            pprust::AnnNode::Pat(pat) => pat.hir_id.local_id,
            pprust::AnnNode::Arm(arm) => arm.hir_id.local_id,
        };
        if !self.has_bitset_for_local_id(id) {
            return;
        }
        assert!(self.bits_per_id > 0);
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let on_entry = &self.on_entry[start.. end];
            let entry_str = bits_to_string(on_entry);
            let gens = &self.gens[start.. end];
            let gens_str = if gens.iter().any(|&u| u != 0) {
                format!(" gen: {}", bits_to_string(gens))
            } else {
                String::new()
            };
            let action_kills = &self.action_kills[start .. end];
            let action_kills_str = if action_kills.iter().any(|&u| u != 0) {
                format!(" action_kill: {}", bits_to_string(action_kills))
            } else {
                String::new()
            };
            let scope_kills = &self.scope_kills[start .. end];
            let scope_kills_str = if scope_kills.iter().any(|&u| u != 0) {
                format!(" scope_kill: {}", bits_to_string(scope_kills))
            } else {
                String::new()
            };
            ps.synth_comment(
                format!("id {}: {}{}{}{}", id.as_usize(), entry_str,
                        gens_str, action_kills_str, scope_kills_str));
            ps.s.space();
        }
    }
}
/// Builds the map from HIR-local ids to the CFG nodes that represent them.
/// A single id can map to several nodes (e.g. patterns duplicated per arm).
fn build_local_id_to_index(body: Option<&hir::Body>,
                           cfg: &cfg::CFG)
                           -> FxHashMap<hir::ItemLocalId, Vec<CFGIndex>> {
    let mut index = FxHashMap::default();
    // FIXME(#15020) Would it be better to fold formals from decl
    // into cfg itself? i.e., introduce a fn-based flow-graph in
    // addition to the current block-based flow-graph, rather than
    // have to put traversals like this here?
    if let Some(body) = body {
        add_entries_from_fn_body(&mut index, body, cfg.entry);
    }
    cfg.graph.each_node(|node_idx, node| {
        if let cfg::CFGNodeData::AST(id) = node.data {
            index.entry(id).or_default().push(node_idx);
        }
        true
    });
    return index;
    /// Adds mappings from the ast nodes for the formal bindings to
    /// the entry-node in the graph.
    fn add_entries_from_fn_body(index: &mut FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,
                                body: &hir::Body,
                                entry: CFGIndex) {
        use rustc::hir::intravisit::Visitor;
        // Walks each parameter pattern, mapping every binding it contains
        // to the CFG entry node.
        struct Formals<'a> {
            entry: CFGIndex,
            index: &'a mut FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,
        }
        let mut formals = Formals { entry: entry, index: index };
        for param in &body.params {
            formals.visit_pat(&param.pat);
        }
        impl<'a, 'v> Visitor<'v> for Formals<'a> {
            fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'v> {
                intravisit::NestedVisitorMap::None
            }
            fn visit_pat(&mut self, p: &hir::Pat) {
                self.index.entry(p.hir_id.local_id).or_default().push(self.entry);
                intravisit::walk_pat(self, p)
            }
        }
    }
}
/// Flag used by `add_kill` to indicate whether the provided kill
/// takes effect only when control flows directly through the node in
/// question, or if the kill's effect is associated with any
/// control-flow directly through or indirectly over the node.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum KillFrom {
    /// A `ScopeEnd` kill is one that takes effect when any control
    /// flow goes over the node. A kill associated with the end of the
    /// scope of a variable declaration `let x;` is an example of a
    /// `ScopeEnd` kill.
    ScopeEnd,
    /// An `Execution` kill is one that takes effect only when control
    /// flow goes through the node to completion. A kill associated
    /// with an assignment statement `x = expr;` is an example of an
    /// `Execution` kill.
    Execution,
}
impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> {
    /// Allocates all bit sets for an analysis of `bits_per_id` bits over the
    /// nodes of `cfg`, initializing entry sets from `oper.initial_value()`.
    pub fn new(
        tcx: TyCtxt<'tcx>,
        analysis_name: &'static str,
        body: Option<&hir::Body>,
        cfg: &cfg::CFG,
        oper: O,
        bits_per_id: usize,
    ) -> DataFlowContext<'tcx, O> {
        let usize_bits = mem::size_of::<usize>() * 8;
        // Round up so each node's bits occupy a whole number of words.
        let words_per_id = (bits_per_id + usize_bits - 1) / usize_bits;
        let num_nodes = cfg.graph.all_nodes().len();
        debug!("DataFlowContext::new(analysis_name: {}, \
                bits_per_id={}, words_per_id={}) \
                num_nodes: {}",
               analysis_name, bits_per_id, words_per_id,
               num_nodes);
        // All-ones or all-zeroes per word, depending on the operator.
        let entry = if oper.initial_value() { usize::MAX } else {0};
        let zeroes = vec![0; num_nodes * words_per_id];
        let gens = zeroes.clone();
        let kills1 = zeroes.clone();
        let kills2 = zeroes;
        let on_entry = vec![entry; num_nodes * words_per_id];
        let local_id_to_index = build_local_id_to_index(body, cfg);
        DataFlowContext {
            tcx,
            analysis_name,
            words_per_id,
            local_id_to_index,
            bits_per_id,
            oper,
            gens,
            action_kills: kills1,
            scope_kills: kills2,
            on_entry,
        }
    }
    /// Sets `bit` in the gen set of every CFG node associated with `id`.
    pub fn add_gen(&mut self, id: hir::ItemLocalId, bit: usize) {
        //! Indicates that `id` generates `bit`
        debug!("{} add_gen(id={:?}, bit={})",
               self.analysis_name, id, bit);
        assert!(self.local_id_to_index.contains_key(&id));
        assert!(self.bits_per_id > 0);
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let gens = &mut self.gens[start.. end];
            set_bit(gens, bit);
        }
    }
    /// Sets `bit` in the kill set (`kind` selects which one) of every CFG
    /// node associated with `id`.
    pub fn add_kill(&mut self, kind: KillFrom, id: hir::ItemLocalId, bit: usize) {
        //! Indicates that `id` kills `bit`
        debug!("{} add_kill(id={:?}, bit={})",
               self.analysis_name, id, bit);
        assert!(self.local_id_to_index.contains_key(&id));
        assert!(self.bits_per_id > 0);
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let kills = match kind {
                KillFrom::Execution => &mut self.action_kills[start.. end],
                KillFrom::ScopeEnd => &mut self.scope_kills[start.. end],
            };
            set_bit(kills, bit);
        }
    }
    fn apply_gen_kill(&self, cfgidx: CFGIndex, bits: &mut [usize]) {
        //! Applies the gen and kill sets for `cfgidx` to `bits`
        debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [before]",
               self.analysis_name, cfgidx, mut_bits_to_string(bits));
        assert!(self.bits_per_id > 0);
        let (start, end) = self.compute_id_range(cfgidx);
        // Transfer function: bits = (bits | gens) - action_kills - scope_kills.
        let gens = &self.gens[start.. end];
        bitwise(bits, gens, &Union);
        let kills = &self.action_kills[start.. end];
        bitwise(bits, kills, &Subtract);
        let kills = &self.scope_kills[start.. end];
        bitwise(bits, kills, &Subtract);
        debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [after]",
               self.analysis_name, cfgidx, mut_bits_to_string(bits));
    }
    /// Maps a CFG node index to the `[start, end)` word range that holds its
    /// bits inside `gens`/`kills`/`on_entry`.
    fn compute_id_range(&self, cfgidx: CFGIndex) -> (usize, usize) {
        let n = cfgidx.node_id();
        let start = n * self.words_per_id;
        let end = start + self.words_per_id;
        assert!(start < self.gens.len());
        assert!(end <= self.gens.len());
        assert!(self.gens.len() == self.action_kills.len());
        assert!(self.gens.len() == self.scope_kills.len());
        assert!(self.gens.len() == self.on_entry.len());
        (start, end)
    }
    pub fn each_bit_on_entry<F>(&self, id: hir::ItemLocalId, mut f: F) -> bool where
        F: FnMut(usize) -> bool,
    {
        //! Iterates through each bit that is set on entry to `id`.
        //! Only useful after `propagate()` has been called.
        if !self.has_bitset_for_local_id(id) {
            return true;
        }
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            if !self.each_bit_for_node(EntryOrExit::Entry, cfgidx, |i| f(i)) {
                return false;
            }
        }
        return true;
    }
    pub fn each_bit_for_node<F>(&self, e: EntryOrExit, cfgidx: CFGIndex, f: F) -> bool where
        F: FnMut(usize) -> bool,
    {
        //! Iterates through each bit that is set on entry/exit to `cfgidx`.
        //! Only useful after `propagate()` has been called.
        if self.bits_per_id == 0 {
            // Skip the surprisingly common degenerate case.  (Note
            // compute_id_range requires self.words_per_id > 0.)
            return true;
        }
        let (start, end) = self.compute_id_range(cfgidx);
        let on_entry = &self.on_entry[start.. end];
        let temp_bits;
        // For `Exit`, apply this node's transfer function to a scratch copy.
        let slice = match e {
            EntryOrExit::Entry => on_entry,
            EntryOrExit::Exit => {
                let mut t = on_entry.to_vec();
                self.apply_gen_kill(cfgidx, &mut t);
                temp_bits = t;
                &temp_bits[..]
            }
        };
        debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}",
               self.analysis_name, e, cfgidx, bits_to_string(slice));
        self.each_bit(slice, f)
    }
    pub fn each_gen_bit<F>(&self, id: hir::ItemLocalId, mut f: F) -> bool where
        F: FnMut(usize) -> bool,
    {
        //! Iterates through each bit in the gen set for `id`.
        if !self.has_bitset_for_local_id(id) {
            return true;
        }
        if self.bits_per_id == 0 {
            // Skip the surprisingly common degenerate case.  (Note
            // compute_id_range requires self.words_per_id > 0.)
            return true;
        }
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let gens = &self.gens[start.. end];
            debug!("{} each_gen_bit(id={:?}, gens={})",
                   self.analysis_name, id, bits_to_string(gens));
            if !self.each_bit(gens, |i| f(i)) {
                return false;
            }
        }
        return true;
    }
    fn each_bit<F>(&self, words: &[usize], mut f: F) -> bool where
        F: FnMut(usize) -> bool,
    {
        //! Helper for iterating over the bits in a bit set.
        //! Returns false on the first call to `f` that returns false;
        //! if all calls to `f` return true, then returns true.
        let usize_bits = mem::size_of::<usize>() * 8;
        for (word_index, &word) in words.iter().enumerate() {
            if word != 0 {
                let base_index = word_index * usize_bits;
                for offset in 0..usize_bits {
                    let bit = 1 << offset;
                    if (word & bit) != 0 {
                        // N.B., we round up the total number of bits
                        // that we store in any given bit set so that
                        // it is an even multiple of usize::BITS. This
                        // means that there may be some stray bits at
                        // the end that do not correspond to any
                        // actual value. So before we callback, check
                        // whether the bit_index is greater than the
                        // actual value the user specified and stop
                        // iterating if so.
                        let bit_index = base_index + offset as usize;
                        if bit_index >= self.bits_per_id {
                            return true;
                        } else if !f(bit_index) {
                            return false;
                        }
                    }
                }
            }
        }
        return true;
    }
    pub fn add_kills_from_flow_exits(&mut self, cfg: &cfg::CFG) {
        //! Whenever you have a `break` or `continue` statement, flow
        //! exits through any number of enclosing scopes on its way to
        //! the new destination. This function infers the kill bits of
        //! those control operators based on the kill bits associated
        //! with those scopes.
        //!
        //! This is usually called (if it is called at all), after
        //! all add_gen and add_kill calls, but before propagate.
        debug!("{} add_kills_from_flow_exits", self.analysis_name);
        if self.bits_per_id == 0 {
            // Skip the surprisingly common degenerate case.  (Note
            // compute_id_range requires self.words_per_id > 0.)
            return;
        }
        cfg.graph.each_edge(|_edge_index, edge| {
            let flow_exit = edge.source();
            let (start, end) = self.compute_id_range(flow_exit);
            let mut orig_kills = self.scope_kills[start.. end].to_vec();
            let mut changed = false;
            // Union in the scope-end kills of every scope this edge exits.
            for &id in &edge.data.exiting_scopes {
                let opt_cfg_idx = self.local_id_to_index.get(&id);
                match opt_cfg_idx {
                    Some(indices) => {
                        for &cfg_idx in indices {
                            let (start, end) = self.compute_id_range(cfg_idx);
                            let kills = &self.scope_kills[start.. end];
                            if bitwise(&mut orig_kills, kills, &Union) {
                                debug!("scope exits: scope id={:?} \
                                        (node={:?} of {:?}) added killset: {}",
                                       id, cfg_idx, indices,
                                       bits_to_string(kills));
                                changed = true;
                            }
                        }
                    }
                    None => {
                        debug!("{} add_kills_from_flow_exits flow_exit={:?} \
                                no cfg_idx for exiting_scope={:?}",
                               self.analysis_name, flow_exit, id);
                    }
                }
            }
            // Write the accumulated kills back only if anything changed.
            if changed {
                let bits = &mut self.scope_kills[start.. end];
                debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]",
                       self.analysis_name, flow_exit, mut_bits_to_string(bits));
                bits.copy_from_slice(&orig_kills[..]);
                debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]",
                       self.analysis_name, flow_exit, mut_bits_to_string(bits));
            }
            true
        });
    }
}
// N.B. `Clone + 'static` only needed for pretty printing.
impl<'tcx, O: DataFlowOperator + Clone + 'static> DataFlowContext<'tcx, O> {
    pub fn propagate(&mut self, cfg: &cfg::CFG, body: &hir::Body) {
        //! Performs the data flow analysis.
        if self.bits_per_id == 0 {
            // Optimize the surprisingly common degenerate case.
            return;
        }
        {
            let words_per_id = self.words_per_id;
            let mut propcx = PropagationContext {
                dfcx: &mut *self,
                changed: true
            };
            // Iterate to a fixed point: re-walk the CFG until one full
            // pass leaves every node's entry set unchanged.
            let nodes_po = cfg.graph.nodes_in_postorder(OUTGOING, cfg.entry);
            // Scratch bit vector, reused across passes to avoid
            // reallocating per iteration.
            let mut temp = vec![0; words_per_id];
            let mut num_passes = 0;
            while propcx.changed {
                num_passes += 1;
                propcx.changed = false;
                propcx.reset(&mut temp);
                propcx.walk_cfg(cfg, &nodes_po, &mut temp);
            }
            debug!("finished in {} iterations", num_passes);
        }
        // Debug-dump the final dataflow state interleaved with the
        // pretty-printed body (this is why O: Clone + 'static).
        debug!("Dataflow result for {}:", self.analysis_name);
        debug!("{}", pprust::to_string(self, |s| {
            s.cbox(pprust::INDENT_UNIT);
            s.ibox(0);
            s.print_expr(&body.value)
        }));
    }
}
impl<O: DataFlowOperator> PropagationContext<'_, 'tcx, O> {
    /// Performs one full pass over the CFG in reverse post-order,
    /// pushing each node's on-exit state into its successors' on-entry
    /// sets. Sets `self.changed` (via the entry-set join) if anything moved.
    fn walk_cfg(&mut self,
                cfg: &cfg::CFG,
                nodes_po: &[CFGIndex],
                in_out: &mut [usize]) {
        debug!("DataFlowContext::walk_cfg(in_out={}) {}",
               bits_to_string(in_out), self.dfcx.analysis_name);
        assert!(self.dfcx.bits_per_id > 0);
        // Iterate over nodes in reverse post-order.
        for &node_index in nodes_po.iter().rev() {
            let node = cfg.graph.node(node_index);
            debug!("DataFlowContext::walk_cfg idx={:?} id={:?} begin in_out={}",
                   node_index, node.data.id(), bits_to_string(in_out));
            let (start, end) = self.dfcx.compute_id_range(node_index);
            // Initialize local bitvector with state on-entry.
            in_out.copy_from_slice(&self.dfcx.on_entry[start.. end]);
            // Compute state on-exit by applying transfer function to
            // state on-entry.
            self.dfcx.apply_gen_kill(node_index, in_out);
            // Propagate state on-exit from node into its successors.
            self.propagate_bits_into_graph_successors_of(in_out, cfg, node_index);
        }
    }
    /// Fills `bits` with the operator's initial value: all-ones when
    /// `initial_value()` is true, all-zeros otherwise.
    fn reset(&mut self, bits: &mut [usize]) {
        let e = if self.dfcx.oper.initial_value() {usize::MAX} else {0};
        for b in bits {
            *b = e;
        }
    }
    /// Feeds `pred_bits` (this node's on-exit state) into the on-entry
    /// set of the target of each outgoing edge.
    fn propagate_bits_into_graph_successors_of(&mut self,
                                               pred_bits: &[usize],
                                               cfg: &cfg::CFG,
                                               cfgidx: CFGIndex) {
        for (_, edge) in cfg.graph.outgoing_edges(cfgidx) {
            self.propagate_bits_into_entry_set_for(pred_bits, edge);
        }
    }
    /// Joins `pred_bits` into the on-entry set of `edge`'s target using
    /// the dataflow operator, and records whether the set actually
    /// changed — this is what drives the fixed-point loop in `propagate`.
    fn propagate_bits_into_entry_set_for(&mut self,
                                         pred_bits: &[usize],
                                         edge: &cfg::CFGEdge) {
        let source = edge.source();
        let cfgidx = edge.target();
        debug!("{} propagate_bits_into_entry_set_for(pred_bits={}, {:?} to {:?})",
               self.dfcx.analysis_name, bits_to_string(pred_bits), source, cfgidx);
        assert!(self.dfcx.bits_per_id > 0);
        let (start, end) = self.dfcx.compute_id_range(cfgidx);
        let changed = {
            // (scoping mutable borrow of self.dfcx.on_entry)
            let on_entry = &mut self.dfcx.on_entry[start.. end];
            bitwise(on_entry, pred_bits, &self.dfcx.oper)
        };
        if changed {
            debug!("{} changed entry set for {:?} to {}",
                   self.dfcx.analysis_name, cfgidx,
                   bits_to_string(&self.dfcx.on_entry[start.. end]));
            self.changed = true;
        }
    }
}
/// Convenience wrapper around `bits_to_string` for call sites that hold a
/// `&mut [usize]`, so they can log without an explicit reborrow.
fn mut_bits_to_string(words: &mut [usize]) -> String {
    bits_to_string(words)
}
/// Renders a bit-vector word slice as bracketed hex bytes, e.g.
/// `[01-00-00-00-00-00-00-00]` for `&[1]` on a 64-bit target.
///
/// Note: this is a little endian printout of bytes — the low byte of
/// each word is printed first. An empty slice renders as just `"]"`,
/// matching the historical behavior (the `'['` doubles as the first
/// separator and is never emitted when there are no bytes).
fn bits_to_string(words: &[usize]) -> String {
    // Pre-size: 3 chars per byte ("-xx") plus the brackets.
    let mut out = String::with_capacity(words.len() * mem::size_of::<usize>() * 3 + 2);
    let mut sep = '[';
    for word in words {
        // `to_le_bytes` yields the bytes low-to-high, exactly the order
        // the manual shift-and-mask loop produced.
        for byte in word.to_le_bytes().iter() {
            out.push(sep);
            out.push_str(&format!("{:02x}", byte));
            sep = '-';
        }
    }
    out.push(']');
    out
}
/// Joins `in_vec` into `out_vec` element-wise via `op`, returning `true`
/// iff any element of `out_vec` changed.
///
/// Both slices must have equal length (asserted). Every element is
/// visited unconditionally — there is deliberately no short-circuit, so
/// the full join is always applied.
#[inline]
fn bitwise<Op: BitwiseOperator>(out_vec: &mut [usize],
                                in_vec: &[usize],
                                op: &Op) -> bool {
    assert_eq!(out_vec.len(), in_vec.len());
    out_vec.iter_mut().zip(in_vec).fold(false, |changed, (out_elt, &in_elt)| {
        let joined = op.join(*out_elt, in_elt);
        let differs = *out_elt != joined;
        *out_elt = joined;
        // `|` (not `||`): keep accumulating even once a change is seen.
        changed | differs
    })
}
/// Sets bit number `bit` in the packed bit vector `words`, returning
/// `true` iff the bit was previously clear (i.e. the word changed).
fn set_bit(words: &mut [usize], bit: usize) -> bool {
    debug!("set_bit: words={} bit={}",
           mut_bits_to_string(words), bit_str(bit));
    let usize_bits = mem::size_of::<usize>() * 8;
    // Locate the word containing `bit` and build the mask for it.
    let word = bit / usize_bits;
    let bit_in_word = bit % usize_bits;
    let bit_mask = 1 << bit_in_word;
    debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, bit_mask);
    let previous = words[word];
    words[word] = previous | bit_mask;
    // Changed exactly when the bit was not already set.
    words[word] != previous
}
/// Formats a bit index for debug logs as `[bit:byte-mask]`, where `byte`
/// is the containing byte index and `mask` is the bit's mask within that
/// byte, in two-digit lowercase hex.
fn bit_str(bit: usize) -> String {
    let byte = bit / 8;
    let lobits = 1_usize << (bit % 8);
    format!("[{}:{}-{:02x}]", bit, byte, lobits)
}
/// Bitwise-OR join; used via `bitwise(..., &Union)` when merging kill
/// sets (a bit set on either side stays set).
struct Union;
impl BitwiseOperator for Union {
    fn join(&self, a: usize, b: usize) -> usize { a | b }
}
/// Set-difference join: clears from `a` every bit that is set in `b`
/// (`a & !b`).
struct Subtract;
impl BitwiseOperator for Subtract {
    fn join(&self, a: usize, b: usize) -> usize { a & !b }
}

View file

@ -1,145 +0,0 @@
//! This module provides linkage between rustc::middle::graph and
//! libgraphviz traits, specialized to attaching borrowck analysis
//! data to rendered labels.
pub use Variant::*;
pub(crate) use crate::cfg::graphviz::{Node, Edge};
use crate::cfg::graphviz as cfg_dot;
use crate::cfg::CFGIndex;
use crate::borrowck::{self, BorrowckCtxt, LoanPath};
use crate::dataflow::{DataFlowOperator, DataFlowContext, EntryOrExit};
use log::debug;
use std::rc::Rc;
/// Which borrowck dataflow analysis to overlay on the rendered CFG.
#[derive(Debug, Copy, Clone)]
pub enum Variant {
    Loans,
    Moves,
    Assigns,
}

impl Variant {
    /// The short lowercase tag used to label this analysis in node output.
    pub fn short_name(&self) -> &'static str {
        match self {
            Variant::Loans => "loans",
            Variant::Moves => "moves",
            Variant::Assigns => "assigns",
        }
    }
}
/// A graphviz labeller that wraps a plain CFG renderer and decorates
/// each node with borrowck dataflow state (on-entry and on-exit).
pub struct DataflowLabeller<'a, 'tcx> {
    /// The underlying CFG labeller; graph structure and base labels are
    /// delegated to it.
    pub inner: cfg_dot::LabelledCFG<'a, 'tcx>,
    /// Which analyses (loans / moves / assigns) to display per node.
    pub variants: Vec<Variant>,
    pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>,
    /// Precomputed dataflow results the labels are read from.
    pub analysis_data: &'a borrowck::AnalysisData<'tcx>,
}
impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
    /// Renders the dataflow state at node `n` for side `e` (entry or
    /// exit) as one space-separated segment per requested variant, e.g.
    /// `loans: {..} moves: {..}`.
    fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String {
        let id = n.1.data.id();
        debug!("dataflow_for({:?}, id={:?}) {:?}", e, id, self.variants);
        let mut sets = String::new();
        let mut seen_one = false;
        for &variant in &self.variants {
            // Separate segments with a single space (none before the first).
            if seen_one { sets.push_str(" "); } else { seen_one = true; }
            sets.push_str(variant.short_name());
            sets.push_str(": ");
            sets.push_str(&self.dataflow_for_variant(e, n, variant));
        }
        sets
    }
    /// Dispatches to the per-analysis renderer for variant `v`.
    fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node<'_>, v: Variant) -> String {
        let cfgidx = n.0;
        match v {
            Loans => self.dataflow_loans_for(e, cfgidx),
            Moves => self.dataflow_moves_for(e, cfgidx),
            Assigns => self.dataflow_assigns_for(e, cfgidx),
        }
    }
    /// Formats the set bits of `dfcx` at `cfgidx` as `{lp1, lp2, ...}`,
    /// mapping each bit index to a loan path via `to_lp` and stringifying
    /// it through the borrowck context.
    fn build_set<O: DataFlowOperator, F>(
        &self,
        e: EntryOrExit,
        cfgidx: CFGIndex,
        dfcx: &DataFlowContext<'tcx, O>,
        mut to_lp: F,
    ) -> String
    where
        F: FnMut(usize) -> Rc<LoanPath<'tcx>>,
    {
        let mut saw_some = false;
        let mut set = "{".to_string();
        dfcx.each_bit_for_node(e, cfgidx, |index| {
            let lp = to_lp(index);
            // Comma-separate all entries after the first.
            if saw_some {
                set.push_str(", ");
            }
            let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp);
            set.push_str(&loan_str);
            saw_some = true;
            true // keep iterating over all set bits
        });
        set.push_str("}");
        set
    }
    /// Renders the loans dataflow set; bit indices index `all_loans`.
    fn dataflow_loans_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
        let dfcx = &self.analysis_data.loans;
        let loan_index_to_path = |loan_index| {
            let all_loans = &self.analysis_data.all_loans;
            let l: &borrowck::Loan<'_> = &all_loans[loan_index];
            l.loan_path()
        };
        self.build_set(e, cfgidx, dfcx, loan_index_to_path)
    }
    /// Renders the moves dataflow set; bit indices index the move list.
    fn dataflow_moves_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
        let dfcx = &self.analysis_data.move_data.dfcx_moves;
        let move_index_to_path = |move_index| {
            let move_data = &self.analysis_data.move_data.move_data;
            let moves = move_data.moves.borrow();
            let the_move: &borrowck::move_data::Move = &(*moves)[move_index];
            move_data.path_loan_path(the_move.path)
        };
        self.build_set(e, cfgidx, dfcx, move_index_to_path)
    }
    /// Renders the assignments dataflow set; bit indices index
    /// `var_assignments`.
    fn dataflow_assigns_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
        let dfcx = &self.analysis_data.move_data.dfcx_assign;
        let assign_index_to_path = |assign_index| {
            let move_data = &self.analysis_data.move_data.move_data;
            let assignments = move_data.var_assignments.borrow();
            let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index];
            move_data.path_loan_path(assignment.path)
        };
        self.build_set(e, cfgidx, dfcx, assign_index_to_path)
    }
}
impl<'a, 'tcx> dot::Labeller<'a> for DataflowLabeller<'a, 'tcx> {
    type Node = Node<'a>;
    type Edge = Edge<'a>;
    // Identity and edge labels are delegated to the wrapped CFG labeller.
    fn graph_id(&'a self) -> dot::Id<'a> { self.inner.graph_id() }
    fn node_id(&'a self, n: &Node<'a>) -> dot::Id<'a> { self.inner.node_id(n) }
    /// Sandwiches the inner CFG node label between the on-entry dataflow
    /// state (prefix line) and the on-exit state (suffix line).
    fn node_label(&'a self, n: &Node<'a>) -> dot::LabelText<'a> {
        let prefix = self.dataflow_for(EntryOrExit::Entry, n);
        let suffix = self.dataflow_for(EntryOrExit::Exit, n);
        let inner_label = self.inner.node_label(n);
        inner_label
            .prefix_line(dot::LabelText::LabelStr(prefix.into()))
            .suffix_line(dot::LabelText::LabelStr(suffix.into()))
    }
    fn edge_label(&'a self, e: &Edge<'a>) -> dot::LabelText<'a> { self.inner.edge_label(e) }
}
// Graph traversal is pure delegation: the dataflow labeller adds label
// content only and does not alter the graph's structure.
impl<'a, 'tcx> dot::GraphWalk<'a> for DataflowLabeller<'a, 'tcx> {
    type Node = Node<'a>;
    type Edge = Edge<'a>;
    fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.inner.nodes() }
    fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.inner.edges() }
    fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.source(edge) }
    fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.target(edge) }
}

View file

@ -1,23 +0,0 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![allow(non_camel_case_types)]
#![feature(in_band_lifetimes)]
#![feature(nll)]
#![recursion_limit="256"]
#[macro_use]
extern crate rustc;
pub use borrowck::check_crate;
pub use borrowck::build_borrowck_dataflow_data_for_fn;
mod borrowck;
pub mod graphviz;
mod dataflow;
pub mod cfg;
pub use borrowck::provide;

View file

@ -13,10 +13,9 @@ crate-type = ["dylib"]
graphviz = { path = "../libgraphviz" }
lazy_static = "1.0"
log = "0.4"
env_logger = { version = "0.6", default-features = false }
env_logger = { version = "0.7", default-features = false }
rustc = { path = "../librustc" }
rustc_target = { path = "../librustc_target" }
rustc_ast_borrowck = { path = "../librustc_ast_borrowck" }
rustc_data_structures = { path = "../librustc_data_structures" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_metadata = { path = "../librustc_metadata" }

View file

@ -2,7 +2,6 @@
use rustc::hir;
use rustc::hir::map as hir_map;
use rustc::hir::map::blocks;
use rustc::hir::print as pprust_hir;
use rustc::hir::def_id::LOCAL_CRATE;
use rustc::session::Session;
@ -10,9 +9,6 @@ use rustc::session::config::Input;
use rustc::ty::{self, TyCtxt};
use rustc::util::common::ErrorReported;
use rustc_interface::util::ReplaceBodyWithLoop;
use rustc_ast_borrowck as borrowck;
use rustc_ast_borrowck::graphviz as borrowck_dot;
use rustc_ast_borrowck::cfg::{self, graphviz::LabelledCFG};
use rustc_mir::util::{write_mir_pretty, write_mir_graphviz};
use syntax::ast;
@ -20,11 +16,9 @@ use syntax::mut_visit::MutVisitor;
use syntax::print::{pprust};
use syntax_pos::FileName;
use graphviz as dot;
use std::cell::Cell;
use std::fs::File;
use std::io::{self, Write};
use std::io::Write;
use std::option;
use std::path::Path;
use std::str::FromStr;
@ -48,21 +42,11 @@ pub enum PpSourceMode {
PpmTyped,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PpFlowGraphMode {
Default,
/// Drops the labels from the edges in the flowgraph output. This
/// is mostly for use in the -Z unpretty flowgraph run-make tests,
/// since the labels are largely uninteresting in those cases and
/// have become a pain to maintain.
UnlabelledEdges,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PpMode {
PpmSource(PpSourceMode),
PpmHir(PpSourceMode),
PpmHirTree(PpSourceMode),
PpmFlowGraph(PpFlowGraphMode),
PpmMir,
PpmMirCFG,
}
@ -80,15 +64,14 @@ impl PpMode {
PpmHir(_) |
PpmHirTree(_) |
PpmMir |
PpmMirCFG |
PpmFlowGraph(_) => true,
PpmMirCFG => true,
PpmSource(PpmTyped) => panic!("invalid state"),
}
}
pub fn needs_analysis(&self) -> bool {
match *self {
PpmMir | PpmMirCFG | PpmFlowGraph(_) => true,
PpmMir | PpmMirCFG => true,
_ => false,
}
}
@ -114,13 +97,11 @@ pub fn parse_pretty(sess: &Session,
("hir-tree", true) => PpmHirTree(PpmNormal),
("mir", true) => PpmMir,
("mir-cfg", true) => PpmMirCFG,
("flowgraph", true) => PpmFlowGraph(PpFlowGraphMode::Default),
("flowgraph,unlabelled", true) => PpmFlowGraph(PpFlowGraphMode::UnlabelledEdges),
_ => {
if extended {
sess.fatal(&format!("argument to `unpretty` must be one of `normal`, \
`expanded`, `flowgraph[,unlabelled]=<nodeid>`, \
`identified`, `expanded,identified`, `everybody_loops`, \
`expanded`, `identified`, `expanded,identified`, \
`expanded,hygiene`, `everybody_loops`, \
`hir`, `hir,identified`, `hir,typed`, `hir-tree`, \
`mir` or `mir-cfg`; got {}",
name));
@ -501,24 +482,6 @@ impl<'a, 'tcx> pprust_hir::PpAnn for TypedAnnotation<'a, 'tcx> {
}
}
/// Collects the dataflow overlays requested via the
/// `-Z flowgraph-print-{loans,moves,assigns}` debugging flags;
/// `-Z flowgraph-print-all` enables all three.
fn gather_flowgraph_variants(sess: &Session) -> Vec<borrowck_dot::Variant> {
    let print_loans = sess.opts.debugging_opts.flowgraph_print_loans;
    let print_moves = sess.opts.debugging_opts.flowgraph_print_moves;
    let print_assigns = sess.opts.debugging_opts.flowgraph_print_assigns;
    let print_all = sess.opts.debugging_opts.flowgraph_print_all;
    let mut variants = Vec::new();
    if print_all || print_loans {
        variants.push(borrowck_dot::Loans);
    }
    if print_all || print_moves {
        variants.push(borrowck_dot::Moves);
    }
    if print_all || print_assigns {
        variants.push(borrowck_dot::Assigns);
    }
    variants
}
#[derive(Clone, Debug)]
pub enum UserIdentifiedItem {
ItemViaNode(ast::NodeId),
@ -609,81 +572,6 @@ impl UserIdentifiedItem {
}
}
/// Renders the control-flow graph of `code` as graphviz dot to `out`,
/// optionally (when `variants` is non-empty) annotated with borrowck
/// dataflow state. Dataflow annotations require a fn-like node.
fn print_flowgraph<'tcx, W: Write>(
    variants: Vec<borrowck_dot::Variant>,
    tcx: TyCtxt<'tcx>,
    code: blocks::Code<'tcx>,
    mode: PpFlowGraphMode,
    mut out: W,
) -> io::Result<()> {
    let body_id = match code {
        blocks::Code::Expr(expr) => {
            // Find the function this expression is from.
            let mut hir_id = expr.hir_id;
            loop {
                let node = tcx.hir().get(hir_id);
                if let Some(n) = hir::map::blocks::FnLikeNode::from_node(node) {
                    break n.body();
                }
                // Walk up the HIR; the assert guards against a cycle at
                // the crate root (parent == self).
                let parent = tcx.hir().get_parent_node(hir_id);
                assert_ne!(hir_id, parent);
                hir_id = parent;
            }
        }
        blocks::Code::FnLike(fn_like) => fn_like.body(),
    };
    let body = tcx.hir().body(body_id);
    let cfg = cfg::CFG::new(tcx, &body);
    let labelled_edges = mode != PpFlowGraphMode::UnlabelledEdges;
    let hir_id = code.id();
    // We have to disassemble the hir_id because name must be ASCII
    // alphanumeric. This does not appear in the rendered graph, so it does not
    // have to be user friendly.
    let name = format!(
        "hir_id_{}_{}",
        hir_id.owner.index(),
        hir_id.local_id.index(),
    );
    let lcfg = LabelledCFG {
        tcx,
        cfg: &cfg,
        name,
        labelled_edges,
    };
    match code {
        // No dataflow overlays requested: render the bare CFG.
        _ if variants.is_empty() => {
            let r = dot::render(&lcfg, &mut out);
            return expand_err_details(r);
        }
        // Dataflow overlays need a fn-like item to run borrowck on.
        blocks::Code::Expr(_) => {
            tcx.sess.err("--pretty flowgraph with -Z flowgraph-print annotations requires \
                          fn-like node id.");
            return Ok(());
        }
        blocks::Code::FnLike(fn_like) => {
            // Run the AST borrowck dataflow analyses and wrap the CFG
            // labeller so the results appear in the node labels.
            let (bccx, analysis_data) =
                borrowck::build_borrowck_dataflow_data_for_fn(tcx, fn_like.body(), &cfg);
            let lcfg = borrowck_dot::DataflowLabeller {
                inner: lcfg,
                variants,
                borrowck_ctxt: &bccx,
                analysis_data: &analysis_data,
            };
            let r = dot::render(&lcfg, &mut out);
            return expand_err_details(r);
        }
    }

    // Wraps a graphviz render error with a more descriptive message.
    fn expand_err_details(r: io::Result<()>) -> io::Result<()> {
        r.map_err(|ioerr| {
            io::Error::new(io::ErrorKind::Other,
                           format!("graphviz::render failed: {}", ioerr))
        })
    }
}
pub fn visit_crate(sess: &Session, krate: &mut ast::Crate, ppm: PpMode) {
if let PpmSource(PpmEveryBodyLoops) = ppm {
ReplaceBodyWithLoop::new(sess).visit_crate(krate);
@ -872,55 +760,17 @@ fn print_with_analysis(
tcx.analysis(LOCAL_CRATE)?;
let mut print = || match ppm {
match ppm {
PpmMir | PpmMirCFG => {
if let Some(nodeid) = nodeid {
let def_id = tcx.hir().local_def_id_from_node_id(nodeid);
match ppm {
PpmMir => write_mir_pretty(tcx, Some(def_id), &mut out),
PpmMirCFG => write_mir_graphviz(tcx, Some(def_id), &mut out),
_ => unreachable!(),
}?;
} else {
match ppm {
PpmMir => write_mir_pretty(tcx, None, &mut out),
PpmMirCFG => write_mir_graphviz(tcx, None, &mut out),
_ => unreachable!(),
}?;
}
Ok(())
}
PpmFlowGraph(mode) => {
let nodeid =
nodeid.expect("`pretty flowgraph=..` needs NodeId (int) or unique path \
suffix (b::c::d)");
let hir_id = tcx.hir().node_to_hir_id(nodeid);
let node = tcx.hir().find(hir_id).unwrap_or_else(|| {
tcx.sess.fatal(&format!("`--pretty=flowgraph` couldn't find ID: {}", nodeid))
});
match blocks::Code::from_node(&tcx.hir(), hir_id) {
Some(code) => {
let variants = gather_flowgraph_variants(tcx.sess);
let out: &mut dyn Write = &mut out;
print_flowgraph(variants, tcx, code, mode, out)
}
None => {
let message = format!("`--pretty=flowgraph` needs block, fn, or method; \
got {:?}",
node);
let hir_id = tcx.hir().node_to_hir_id(nodeid);
tcx.sess.span_fatal(tcx.hir().span(hir_id), &message)
}
let def_id = nodeid.map(|nid| tcx.hir().local_def_id_from_node_id(nid));
match ppm {
PpmMir => write_mir_pretty(tcx, def_id, &mut out),
PpmMirCFG => write_mir_graphviz(tcx, def_id, &mut out),
_ => unreachable!(),
}
}
_ => unreachable!(),
};
print().unwrap();
}.unwrap();
write_output(out, ofile);

View file

@ -18,7 +18,6 @@ syntax_ext = { path = "../libsyntax_ext" }
syntax_pos = { path = "../libsyntax_pos" }
rustc_serialize = { path = "../libserialize", package = "serialize" }
rustc = { path = "../librustc" }
rustc_ast_borrowck = { path = "../librustc_ast_borrowck" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_traits = { path = "../librustc_traits" }
rustc_data_structures = { path = "../librustc_data_structures" }

View file

@ -17,7 +17,6 @@ use rustc::util::common::{time, ErrorReported};
use rustc::session::Session;
use rustc::session::config::{self, CrateType, Input, OutputFilenames, OutputType};
use rustc::session::search_paths::PathKind;
use rustc_ast_borrowck as borrowck;
use rustc_codegen_ssa::back::link::emit_metadata;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
use rustc_codegen_utils::link::filename_for_metadata;
@ -769,7 +768,6 @@ pub fn default_provide(providers: &mut ty::query::Providers<'_>) {
proc_macro_decls::provide(providers);
plugin::build::provide(providers);
hir::provide(providers);
borrowck::provide(providers);
mir::provide(providers);
reachable::provide(providers);
resolve_lifetime::provide(providers);
@ -937,12 +935,6 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
});
});
time(sess, "borrow checking", || {
if tcx.use_ast_borrowck() {
borrowck::check_crate(tcx);
}
});
time(sess, "MIR borrow checking", || {
tcx.par_body_owners(|def_id| tcx.ensure().mir_borrowck(def_id));
});

View file

@ -442,8 +442,8 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
.map(|c| c.is_green())
.unwrap_or(false));
let key = RecoverKey::recover(tcx.global_tcx(), self).unwrap();
if queries::#name::cache_on_disk(tcx.global_tcx(), key, None) {
let key = RecoverKey::recover(tcx, self).unwrap();
if queries::#name::cache_on_disk(tcx, key, None) {
let _ = tcx.#name(key);
}
}

View file

@ -621,7 +621,7 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx
target: _,
unwind: _,
} => {
let gcx = self.infcx.tcx.global_tcx();
let tcx = self.infcx.tcx;
// Compute the type with accurate region information.
let drop_place_ty = drop_place.ty(self.body, self.infcx.tcx);
@ -629,10 +629,10 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx
// Erase the regions.
let drop_place_ty = self.infcx.tcx.erase_regions(&drop_place_ty).ty;
// "Lift" into the gcx -- once regions are erased, this type should be in the
// "Lift" into the tcx -- once regions are erased, this type should be in the
// global arenas; this "lift" operation basically just asserts that is true, but
// that is useful later.
gcx.lift_to_global(&drop_place_ty).unwrap();
tcx.lift(&drop_place_ty).unwrap();
debug!("visit_terminator_drop \
loc: {:?} term: {:?} drop_place: {:?} drop_place_ty: {:?} span: {:?}",
@ -1932,48 +1932,26 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}
}
Reservation(wk @ WriteKind::Move)
| Write(wk @ WriteKind::Move)
| Reservation(wk @ WriteKind::StorageDeadOrDrop)
| Reservation(wk @ WriteKind::MutableBorrow(BorrowKind::Shared))
| Reservation(wk @ WriteKind::MutableBorrow(BorrowKind::Shallow))
| Write(wk @ WriteKind::StorageDeadOrDrop)
| Write(wk @ WriteKind::MutableBorrow(BorrowKind::Shared))
| Write(wk @ WriteKind::MutableBorrow(BorrowKind::Shallow)) => {
if let (Err(place_err), true) = (
Reservation(WriteKind::Move)
| Write(WriteKind::Move)
| Reservation(WriteKind::StorageDeadOrDrop)
| Reservation(WriteKind::MutableBorrow(BorrowKind::Shared))
| Reservation(WriteKind::MutableBorrow(BorrowKind::Shallow))
| Write(WriteKind::StorageDeadOrDrop)
| Write(WriteKind::MutableBorrow(BorrowKind::Shared))
| Write(WriteKind::MutableBorrow(BorrowKind::Shallow)) => {
if let (Err(_), true) = (
self.is_mutable(place.as_ref(), is_local_mutation_allowed),
self.errors_buffer.is_empty()
) {
if self.infcx.tcx.migrate_borrowck() {
// rust-lang/rust#46908: In pure NLL mode this
// code path should be unreachable (and thus
// we signal an ICE in the else branch
// here). But we can legitimately get here
// under borrowck=migrate mode, so instead of
// ICE'ing we instead report a legitimate
// error (which will then be downgraded to a
// warning by the migrate machinery).
error_access = match wk {
WriteKind::MutableBorrow(_) => AccessKind::MutableBorrow,
WriteKind::Move => AccessKind::Move,
WriteKind::StorageDeadOrDrop |
WriteKind::Mutate => AccessKind::Mutate,
};
self.report_mutability_error(
place,
span,
place_err,
error_access,
location,
);
} else {
span_bug!(
span,
"Accessing `{:?}` with the kind `{:?}` shouldn't be possible",
place,
kind,
);
}
// rust-lang/rust#46908: In pure NLL mode this code path should
// be unreachable (and thus we signal an ICE in the else branch here).
span_bug!(
span,
"Accessing `{:?}` with the kind `{:?}` shouldn't be possible",
place,
kind,
);
}
return false;
}

View file

@ -18,7 +18,6 @@ use rustc_errors::Applicability;
pub(super) enum AccessKind {
MutableBorrow,
Mutate,
Move,
}
impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
@ -124,7 +123,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
if let Some(desc) = access_place_desc {
item_msg = format!("`{}`", desc);
reason = match error_access {
AccessKind::Move |
AccessKind::Mutate => format!(" which is behind {}", pointer_type),
AccessKind::MutableBorrow => {
format!(", as it is behind {}", pointer_type)
@ -194,12 +192,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
let acted_on;
let span = match error_access {
AccessKind::Move => {
err = self.cannot_move_out_of(span, &(item_msg + &reason));
err.span_label(span, "cannot move");
err.buffer(&mut self.errors_buffer);
return;
}
AccessKind::Mutate => {
err = self.cannot_assign(span, &(item_msg + &reason));
act = "assign";

View file

@ -1894,9 +1894,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// Erase the regions from `ty` to get a global type. The
// `Sized` bound in no way depends on precise regions, so this
// shouldn't affect `is_sized`.
let gcx = tcx.global_tcx();
let erased_ty = tcx.erase_regions(&ty);
if !erased_ty.is_sized(gcx.at(span), self.param_env) {
if !erased_ty.is_sized(tcx.at(span), self.param_env) {
// in current MIR construction, all non-control-flow rvalue
// expressions evaluate through `as_temp` or `into` a return
// slot or local, so to find all unsized rvalues it is enough

View file

@ -521,9 +521,8 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
defining_ty: DefiningTy<'tcx>,
) -> UniversalRegionIndices<'tcx> {
let tcx = self.infcx.tcx;
let gcx = tcx.global_tcx();
let closure_base_def_id = tcx.closure_base_def_id(self.mir_def_id);
let identity_substs = InternalSubsts::identity_for_item(gcx, closure_base_def_id);
let identity_substs = InternalSubsts::identity_for_item(tcx, closure_base_def_id);
let fr_substs = match defining_ty {
DefiningTy::Closure(_, ClosureSubsts { ref substs })
| DefiningTy::Generator(_, GeneratorSubsts { ref substs }, _) => {
@ -542,7 +541,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
DefiningTy::FnDef(_, substs) | DefiningTy::Const(_, substs) => substs,
};
let global_mapping = iter::once((gcx.lifetimes.re_static, fr_static));
let global_mapping = iter::once((tcx.lifetimes.re_static, fr_static));
let subst_mapping = identity_substs
.regions()
.zip(fr_substs.regions().map(|r| r.to_region_vid()));

View file

@ -148,9 +148,8 @@ pub(crate) fn on_all_drop_children_bits<'tcx, F>(
let ty = place.ty(body, tcx).ty;
debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty);
let gcx = tcx.global_tcx();
let erased_ty = tcx.erase_regions(&ty);
if erased_ty.needs_drop(gcx, ctxt.param_env) {
if erased_ty.needs_drop(tcx, ctxt.param_env) {
each_child(child);
} else {
debug!("on_all_drop_children_bits - skipping")

View file

@ -543,9 +543,9 @@ fn make_mirror_unadjusted<'a, 'tcx>(
// Now comes the rote stuff:
hir::ExprKind::Repeat(ref v, ref count) => {
let def_id = cx.tcx.hir().local_def_id(count.hir_id);
let substs = InternalSubsts::identity_for_item(cx.tcx.global_tcx(), def_id);
let substs = InternalSubsts::identity_for_item(cx.tcx, def_id);
let instance = ty::Instance::resolve(
cx.tcx.global_tcx(),
cx.tcx,
cx.param_env,
def_id,
substs,

View file

@ -83,7 +83,7 @@ impl<'a, 'tcx> Cx<'a, 'tcx> {
infcx,
root_lint_level: src_id,
param_env: tcx.param_env(src_def_id),
identity_substs: InternalSubsts::identity_for_item(tcx.global_tcx(), src_def_id),
identity_substs: InternalSubsts::identity_for_item(tcx, src_def_id),
region_scope_tree: tcx.region_scope_tree(src_def_id),
tables,
constness,
@ -154,12 +154,11 @@ impl<'a, 'tcx> Cx<'a, 'tcx> {
}
pub fn pattern_from_hir(&mut self, p: &hir::Pat) -> Pat<'tcx> {
let tcx = self.tcx.global_tcx();
let p = match tcx.hir().get(p.hir_id) {
let p = match self.tcx.hir().get(p.hir_id) {
Node::Pat(p) | Node::Binding(p) => p,
node => bug!("pattern became {:?}", node)
};
Pat::from_hir(tcx, self.param_env.and(self.identity_substs), self.tables(), p)
Pat::from_hir(self.tcx, self.param_env.and(self.identity_substs), self.tables(), p)
}
pub fn trait_method(&mut self,
@ -187,7 +186,7 @@ impl<'a, 'tcx> Cx<'a, 'tcx> {
}
pub fn needs_drop(&mut self, ty: Ty<'tcx>) -> bool {
ty.needs_drop(self.tcx.global_tcx(), self.param_env)
ty.needs_drop(self.tcx, self.param_env)
}
pub fn tcx(&self) -> TyCtxt<'tcx> {

View file

@ -4,7 +4,6 @@ use super::_match::WitnessPreference::*;
use super::{PatCtxt, PatternError, PatKind};
use rustc::middle::borrowck::SignalledError;
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::{InternalSubsts, SubstsRef};
@ -21,11 +20,10 @@ use std::slice;
use syntax_pos::{Span, DUMMY_SP, MultiSpan};
crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) -> SignalledError {
let body_id = if let Some(id) = tcx.hir().as_local_hir_id(def_id) {
tcx.hir().body_owned_by(id)
} else {
return SignalledError::NoErrorsSeen;
crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) {
let body_id = match tcx.hir().as_local_hir_id(def_id) {
None => return,
Some(id) => tcx.hir().body_owned_by(id),
};
let mut visitor = MatchVisitor {
@ -33,10 +31,8 @@ crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) -> SignalledError {
tables: tcx.body_tables(body_id),
param_env: tcx.param_env(def_id),
identity_substs: InternalSubsts::identity_for_item(tcx, def_id),
signalled_error: SignalledError::NoErrorsSeen,
};
visitor.visit_body(tcx.hir().body(body_id));
visitor.signalled_error
}
fn create_e0004(sess: &Session, sp: Span, error_message: String) -> DiagnosticBuilder<'_> {
@ -48,7 +44,6 @@ struct MatchVisitor<'a, 'tcx> {
tables: &'a ty::TypeckTables<'tcx>,
param_env: ty::ParamEnv<'tcx>,
identity_substs: SubstsRef<'tcx>,
signalled_error: SignalledError,
}
impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, 'tcx> {
@ -136,13 +131,7 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
// First, check legality of move bindings.
self.check_patterns(arm.guard.is_some(), &arm.pat);
// Second, if there is a guard on each arm, make sure it isn't
// assigning or borrowing anything mutably.
if arm.guard.is_some() {
self.signalled_error = SignalledError::SawSomeError;
}
// Third, perform some lints.
// Second, perform some lints.
check_for_bindings_named_same_as_variants(self, &arm.pat);
}
@ -151,10 +140,17 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
let mut have_errors = false;
let inlined_arms : Vec<(Vec<_>, _)> = arms.iter().map(|arm| (
arm.top_pats_hack().iter().map(|pat| {
let mut patcx = PatCtxt::new(self.tcx,
self.param_env.and(self.identity_substs),
self.tables);
// HACK(or_patterns; Centril | dlrobertson): Remove this and
// correctly handle exhaustiveness checking for nested or-patterns.
match &arm.pat.kind {
hir::PatKind::Or(pats) => pats,
_ => std::slice::from_ref(&arm.pat),
}.iter().map(|pat| {
let mut patcx = PatCtxt::new(
self.tcx,
self.param_env.and(self.identity_substs),
self.tables
);
patcx.include_lint_checks();
let pattern = expand_pattern(cx, patcx.lower_pattern(&pat));
if !patcx.errors.is_empty() {
@ -270,20 +266,51 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
"refutable pattern in {}: {} not covered",
origin, joined_patterns
);
err.span_label(pat.span, match &pat.kind {
match &pat.kind {
hir::PatKind::Path(hir::QPath::Resolved(None, path))
if path.segments.len() == 1 && path.segments[0].args.is_none() => {
format!("interpreted as {} {} pattern, not new variable",
path.res.article(), path.res.descr())
if path.segments.len() == 1 && path.segments[0].args.is_none() =>
{
const_not_var(&mut err, cx.tcx, pat, path);
}
_ => pattern_not_convered_label(&witnesses, &joined_patterns),
});
_ => {
err.span_label(
pat.span,
pattern_not_covered_label(&witnesses, &joined_patterns),
);
}
}
adt_defined_here(cx, &mut err, pattern_ty, &witnesses);
err.emit();
});
}
}
/// A path pattern was interpreted as a constant, not a new variable.
/// This caused an irrefutable match failure in e.g. `let`.
///
/// Annotates `err` with: a label on the pattern explaining the
/// misinterpretation, a (MaybeIncorrect) suggestion to rename, and —
/// when the constant is defined locally — a label at its definition.
fn const_not_var(err: &mut DiagnosticBuilder<'_>, tcx: TyCtxt<'_>, pat: &Pat, path: &hir::Path) {
    let descr = path.res.descr();
    err.span_label(pat.span, format!(
        "interpreted as {} {} pattern, not a new variable",
        path.res.article(),
        descr,
    ));
    err.span_suggestion(
        pat.span,
        "introduce a variable instead",
        format!("{}_var", path.segments[0].ident).to_lowercase(),
        // Cannot use `MachineApplicable` as it's not really *always* correct
        // because there may be such an identifier in scope or the user maybe
        // really wanted to match against the constant. This is quite unlikely however.
        Applicability::MaybeIncorrect,
    );
    // `res_span` returns Some only for definitions in the local crate.
    if let Some(span) = tcx.hir().res_span(path.res) {
        err.span_label(span, format!("{} defined here", descr));
    }
}
fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_>, pat: &Pat) {
pat.walk(|p| {
if let hir::PatKind::Binding(_, _, ident, None) = p.kind {
@ -449,7 +476,7 @@ fn check_exhaustive<'tcx>(
cx.tcx.sess, sp,
format!("non-exhaustive patterns: {} not covered", joined_patterns),
);
err.span_label(sp, pattern_not_convered_label(&witnesses, &joined_patterns));
err.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns));
adt_defined_here(cx, &mut err, scrut_ty, &witnesses);
err.help(
"ensure that all possible cases are being handled, \
@ -475,7 +502,7 @@ fn joined_uncovered_patterns(witnesses: &[super::Pat<'_>]) -> String {
}
}
fn pattern_not_convered_label(witnesses: &[super::Pat<'_>], joined_patterns: &str) -> String {
fn pattern_not_covered_label(witnesses: &[super::Pat<'_>], joined_patterns: &str) -> String {
format!("pattern{} {} not covered", rustc_errors::pluralise!(witnesses.len()), joined_patterns)
}

View file

@ -79,7 +79,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx
}
ty::InstanceDef::ClosureOnceShim { call_once } => {
let fn_mut = tcx.lang_items().fn_mut_trait().unwrap();
let call_mut = tcx.global_tcx()
let call_mut = tcx
.associated_items(fn_mut)
.find(|it| it.kind == ty::AssocKind::Method)
.unwrap().def_id;

View file

@ -28,17 +28,7 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
let param_env = tcx.param_env(src.def_id()).with_reveal_all();
let move_data = match MoveData::gather_moves(body, tcx) {
Ok(move_data) => move_data,
Err((move_data, _move_errors)) => {
// The only way we should be allowing any move_errors
// in here is if we are in the migration path for the
// NLL-based MIR-borrowck.
//
// If we are in the migration path, we have already
// reported these errors as warnings to the user. So
// we will just ignore them here.
assert!(tcx.migrate_borrowck());
move_data
}
Err(_) => bug!("No `move_errors` should be allowed in MIR borrowck"),
};
let elaborate_patch = {
let body = &*body;

View file

@ -291,10 +291,6 @@ fn optimized_mir(tcx: TyCtxt<'_>, def_id: DefId) -> &Body<'_> {
// execute before we can steal.
tcx.ensure().mir_borrowck(def_id);
if tcx.use_ast_borrowck() {
tcx.ensure().borrowck(def_id);
}
let (body, _) = tcx.mir_validated(def_id);
let mut body = body.steal();
run_optimization_passes(tcx, &mut body, def_id, None);

View file

@ -474,7 +474,7 @@ impl context::UnificationOps<ChalkArenas<'tcx>, ChalkArenas<'tcx>>
&self,
value: DelayedLiteral<ChalkArenas<'tcx>>,
) -> DelayedLiteral<ChalkArenas<'tcx>> {
match self.infcx.tcx.lift_to_global(&value) {
match self.infcx.tcx.lift(&value) {
Some(literal) => literal,
None => bug!("cannot lift {:?}", value),
}

View file

@ -1269,7 +1269,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// to avoid ICEs.
for item in &regular_traits {
let object_safety_violations =
tcx.global_tcx().astconv_object_safety_violations(item.trait_ref().def_id());
tcx.astconv_object_safety_violations(item.trait_ref().def_id());
if !object_safety_violations.is_empty() {
tcx.report_object_safety_error(
span,
@ -1368,11 +1368,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
span,
format!("associated type `{}` must be specified", assoc_item.ident),
);
if item_def_id.is_local() {
err.span_label(
tcx.def_span(*item_def_id),
format!("`{}` defined here", assoc_item.ident),
);
if let Some(sp) = tcx.hir().span_if_local(*item_def_id) {
err.span_label(sp, format!("`{}` defined here", assoc_item.ident));
}
if suggest {
if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(

View file

@ -351,16 +351,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_label(call_expr.span, "call expression requires function");
let def_span = match def {
Res::Err => None,
Res::Local(id) => {
Some(self.tcx.hir().span(id))
},
_ => def
.opt_def_id()
.and_then(|did| self.tcx.hir().span_if_local(did)),
};
if let Some(span) = def_span {
if let Some(span) = self.tcx.hir().res_span(def) {
let label = match (unit_variant, inner_callee_path) {
(Some(path), _) => format!("`{}` defined here", path),
(_, Some(hir::QPath::Resolved(_, path))) => format!(

View file

@ -620,8 +620,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expr: &'tcx hir::Expr
) -> Ty<'tcx> {
if self.ret_coercion.is_none() {
struct_span_err!(self.tcx.sess, expr.span, E0572,
"return statement outside of function body").emit();
struct_span_err!(
self.tcx.sess,
expr.span,
E0572,
"return statement outside of function body",
).emit();
} else if let Some(ref e) = expr_opt {
if self.ret_coercion_span.borrow().is_none() {
*self.ret_coercion_span.borrow_mut() = Some(e.span);
@ -932,9 +936,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Ok(self.to_const(count, tcx.type_of(count_def_id)))
} else {
let param_env = ty::ParamEnv::empty();
let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), count_def_id);
let substs = InternalSubsts::identity_for_item(tcx, count_def_id);
let instance = ty::Instance::resolve(
tcx.global_tcx(),
tcx,
param_env,
count_def_id,
substs,

View file

@ -48,7 +48,7 @@ impl<'tcx> CheckWfFcxBuilder<'tcx> {
// empty `param_env`.
check_false_global_bounds(&fcx, span, id);
}
let wf_tys = f(&fcx, fcx.tcx.global_tcx());
let wf_tys = f(&fcx, fcx.tcx);
fcx.select_all_obligations_or_error();
fcx.regionck_item(id, span, &wf_tys);
});
@ -366,8 +366,8 @@ fn check_item_type(
) {
debug!("check_item_type: {:?}", item_id);
for_id(tcx, item_id, ty_span).with_fcx(|fcx, gcx| {
let ty = gcx.type_of(gcx.hir().local_def_id(item_id));
for_id(tcx, item_id, ty_span).with_fcx(|fcx, tcx| {
let ty = tcx.type_of(tcx.hir().local_def_id(item_id));
let item_ty = fcx.normalize_associated_types_in(ty_span, &ty);
let mut forbid_unsized = true;

View file

@ -322,29 +322,29 @@ fn visit_implementation_of_dispatch_from_dyn(tcx: TyCtxt<'_>, impl_did: DefId) {
}
}
pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUnsizedInfo {
pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUnsizedInfo {
debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
let coerce_unsized_trait = gcx.lang_items().coerce_unsized_trait().unwrap();
let coerce_unsized_trait = tcx.lang_items().coerce_unsized_trait().unwrap();
let unsize_trait = gcx.lang_items().require(UnsizeTraitLangItem).unwrap_or_else(|err| {
gcx.sess.fatal(&format!("`CoerceUnsized` implementation {}", err));
let unsize_trait = tcx.lang_items().require(UnsizeTraitLangItem).unwrap_or_else(|err| {
tcx.sess.fatal(&format!("`CoerceUnsized` implementation {}", err));
});
// this provider should only get invoked for local def-ids
let impl_hir_id = gcx.hir().as_local_hir_id(impl_did).unwrap_or_else(|| {
let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).unwrap_or_else(|| {
bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
});
let source = gcx.type_of(impl_did);
let trait_ref = gcx.impl_trait_ref(impl_did).unwrap();
let source = tcx.type_of(impl_did);
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
assert_eq!(trait_ref.def_id, coerce_unsized_trait);
let target = trait_ref.substs.type_at(1);
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (bound)",
source,
target);
let span = gcx.hir().span(impl_hir_id);
let param_env = gcx.param_env(impl_did);
let span = tcx.hir().span(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!source.has_escaping_bound_vars());
let err_info = CoerceUnsizedInfo { custom_kind: None };
@ -353,7 +353,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
source,
target);
gcx.infer_ctxt().enter(|infcx| {
tcx.infer_ctxt().enter(|infcx| {
let cause = ObligationCause::misc(span, impl_hir_id);
let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>,
mt_b: ty::TypeAndMut<'tcx>,
@ -372,24 +372,24 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
infcx.sub_regions(infer::RelateObjectBound(span), r_b, r_a);
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
let mt_b = ty::TypeAndMut { ty: ty_b, mutbl: mutbl_b };
check_mutbl(mt_a, mt_b, &|ty| gcx.mk_imm_ref(r_b, ty))
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ref(r_b, ty))
}
(&ty::Ref(_, ty_a, mutbl_a), &ty::RawPtr(mt_b)) => {
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
check_mutbl(mt_a, mt_b, &|ty| gcx.mk_imm_ptr(ty))
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty))
}
(&ty::RawPtr(mt_a), &ty::RawPtr(mt_b)) => {
check_mutbl(mt_a, mt_b, &|ty| gcx.mk_imm_ptr(ty))
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty))
}
(&ty::Adt(def_a, substs_a), &ty::Adt(def_b, substs_b)) if def_a.is_struct() &&
def_b.is_struct() => {
if def_a != def_b {
let source_path = gcx.def_path_str(def_a.did);
let target_path = gcx.def_path_str(def_b.did);
span_err!(gcx.sess,
let source_path = tcx.def_path_str(def_a.did);
let target_path = tcx.def_path_str(def_b.did);
span_err!(tcx.sess,
span,
E0377,
"the trait `CoerceUnsized` may only be implemented \
@ -443,9 +443,9 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
let diff_fields = fields.iter()
.enumerate()
.filter_map(|(i, f)| {
let (a, b) = (f.ty(gcx, substs_a), f.ty(gcx, substs_b));
let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b));
if gcx.type_of(f.did).is_phantom_data() {
if tcx.type_of(f.did).is_phantom_data() {
// Ignore PhantomData fields
return None;
}
@ -472,7 +472,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
.collect::<Vec<_>>();
if diff_fields.is_empty() {
span_err!(gcx.sess,
span_err!(tcx.sess,
span,
E0374,
"the trait `CoerceUnsized` may only be implemented \
@ -480,14 +480,14 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
being coerced, none found");
return err_info;
} else if diff_fields.len() > 1 {
let item = gcx.hir().expect_item(impl_hir_id);
let item = tcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref t), _, _) = item.kind {
t.path.span
} else {
gcx.hir().span(impl_hir_id)
tcx.hir().span(impl_hir_id)
};
let mut err = struct_span_err!(gcx.sess,
let mut err = struct_span_err!(tcx.sess,
span,
E0375,
"implementing the trait \
@ -514,7 +514,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
}
_ => {
span_err!(gcx.sess,
span_err!(tcx.sess,
span,
E0376,
"the trait `CoerceUnsized` may only be implemented \
@ -527,7 +527,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
// Register an obligation for `A: Trait<B>`.
let cause = traits::ObligationCause::misc(span, impl_hir_id);
let predicate = gcx.predicate_for_trait_def(param_env,
let predicate = tcx.predicate_for_trait_def(param_env,
cause,
trait_def_id,
0,

View file

@ -1717,9 +1717,7 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
}
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let scope = tcx.hir()
.get_defining_scope(hir_id)
.expect("could not get defining scope");
let scope = tcx.hir().get_defining_scope(hir_id);
let mut locator = ConstraintLocator {
def_id,
tcx,

View file

@ -714,7 +714,6 @@ where
/// # Examples
///
/// ```
/// #![feature(map_get_key_value)]
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
@ -722,7 +721,7 @@ where
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[unstable(feature = "map_get_key_value", issue = "49347")]
#[stable(feature = "map_get_key_value", since = "1.40.0")]
#[inline]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where

View file

@ -566,7 +566,9 @@ mod prim_array { }
#[doc(alias = "[")]
#[doc(alias = "]")]
#[doc(alias = "[]")]
/// A dynamically-sized view into a contiguous sequence, `[T]`.
/// A dynamically-sized view into a contiguous sequence, `[T]`. Contiguous here
/// means that elements are layed out so that every element is the same
/// distance from its neighbors.
///
/// *[See also the `std::slice` module](slice/index.html).*
///

View file

@ -54,8 +54,8 @@ impl Command {
let ret = libc::rtpSpawn(
self.get_argv()[0], // executing program
self.get_argv().as_ptr() as *const _, // argv
*sys::os::environ() as *const *const c_char,
self.get_argv().as_ptr() as *mut *const c_char, // argv
*sys::os::environ() as *mut *const c_char,
100 as c_int, // initial priority
thread::min_stack(), // initial stack size.
0, // options

View file

@ -1546,6 +1546,7 @@ fn calc_result(desc: &TestDesc, task_result: Result<(), Box<dyn Any + Send>>) ->
}
}
}
(&ShouldPanic::Yes, Ok(())) => TrFailedMsg("test did not panic as expected".to_string()),
_ if desc.allow_fail => TrAllowedFail,
_ => TrFailed,
}

View file

@ -2,7 +2,7 @@ use super::*;
use crate::test::{
filter_tests, parse_opts, run_test, DynTestFn, DynTestName, MetricMap, RunIgnored,
ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TrFailed, TrFailedMsg,
ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TrFailedMsg,
TrIgnored, TrOk,
};
use std::sync::mpsc::channel;
@ -167,7 +167,7 @@ fn test_should_panic_but_succeeds() {
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
let (_, res, _, _) = rx.recv().unwrap();
assert!(res == TrFailed);
assert!(res == TrFailedMsg("test did not panic as expected".to_string()));
}
fn report_time_test_template(report_time: bool) -> Option<TestExecTime> {

View file

@ -0,0 +1,10 @@
// This test checks that all expected errors occur when there are multiple invalid attributes
// on an item.
#[inline]
//~^ ERROR attribute should be applied to function or closure [E0518]
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
const FOO: u8 = 0;
fn main() { }

View file

@ -0,0 +1,21 @@
error[E0518]: attribute should be applied to function or closure
--> $DIR/multiple-invalid.rs:4:1
|
LL | #[inline]
| ^^^^^^^^^
...
LL | const FOO: u8 = 0;
| ------------------ not a function or closure
error: attribute should be applied to a function
--> $DIR/multiple-invalid.rs:6:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | const FOO: u8 = 0;
| ------------------ not a function
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0518`.

View file

@ -1,20 +1,38 @@
error[E0005]: refutable pattern in local binding: `0u8..=1u8` and `3u8..=std::u8::MAX` not covered
--> $DIR/const-pattern-irrefutable.rs:12:9
|
LL | const a: u8 = 2;
| ---------------- constant defined here
...
LL | let a = 4;
| ^ interpreted as a constant pattern, not new variable
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `a_var`
error[E0005]: refutable pattern in local binding: `0u8..=1u8` and `3u8..=std::u8::MAX` not covered
--> $DIR/const-pattern-irrefutable.rs:13:9
|
LL | pub const b: u8 = 2;
| -------------------- constant defined here
...
LL | let c = 4;
| ^ interpreted as a constant pattern, not new variable
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `c_var`
error[E0005]: refutable pattern in local binding: `0u8..=1u8` and `3u8..=std::u8::MAX` not covered
--> $DIR/const-pattern-irrefutable.rs:14:9
|
LL | pub const d: u8 = 2;
| -------------------- constant defined here
...
LL | let d = 4;
| ^ interpreted as a constant pattern, not new variable
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `d_var`
error: aborting due to 3 previous errors

View file

@ -2,6 +2,7 @@
fn main() {
let gen = |start| { //~ ERROR generators cannot have explicit parameters
//~^ ERROR type inside generator must be known in this context
yield;
};
}

View file

@ -4,5 +4,18 @@ error[E0628]: generators cannot have explicit parameters
LL | let gen = |start| {
| ^^^^^^^
error: aborting due to previous error
error[E0698]: type inside generator must be known in this context
--> $DIR/no-parameters-on-generators.rs:4:16
|
LL | let gen = |start| {
| ^^^^^ cannot infer type
|
note: the type is part of the generator because of this `yield`
--> $DIR/no-parameters-on-generators.rs:6:9
|
LL | yield;
| ^^^^^
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0698`.

View file

@ -1,33 +0,0 @@
error[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:34:5
|
LL | fn scribbled<'a>(s: Scribble<'a>) -> &'a mut u32 {
| -- lifetime `'a` defined here
LL | &mut *s.0
| ^^^^^^^^^ returning this value requires that `*s.0` is borrowed for `'a`
LL | }
| - here, drop of `s` needs exclusive access to `*s.0`, because the type `Scribble<'_>` implements the `Drop` trait
error[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:39:5
|
LL | fn boxed_scribbled<'a>(s: Box<Scribble<'a>>) -> &'a mut u32 {
| -- lifetime `'a` defined here
LL | &mut *(*s).0
| ^^^^^^^^^^^^ returning this value requires that `*s.0` is borrowed for `'a`
LL | }
| - here, drop of `s` needs exclusive access to `*s.0`, because the type `Scribble<'_>` implements the `Drop` trait
error[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:44:5
|
LL | fn boxed_boxed_scribbled<'a>(s: Box<Box<Scribble<'a>>>) -> &'a mut u32 {
| -- lifetime `'a` defined here
LL | &mut *(**s).0
| ^^^^^^^^^^^^^ returning this value requires that `*s.0` is borrowed for `'a`
LL | }
| - here, drop of `s` needs exclusive access to `*s.0`, because the type `Scribble<'_>` implements the `Drop` trait
error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0713`.

Some files were not shown because too many files have changed in this diff Show more