Rollup merge of #69644 - ecstatic-morse:unified-dataflow-cleanup, r=eddyb

Remove framework in `dataflow/mod.rs` in favor of "generic" one

This is the culmination of the work described in rust-lang/compiler-team#202. All dataflow analyses (including the one in `clippy`) have been ported to use the framework in `dataflow/generic`, which can efficiently handle both gen/kill and generic problems. This PR moves the framework in `dataflow/generic` to `dataflow/framework`, and removes the gen/kill framework in `dataflow/mod.rs`.

More comprehensive documentation for the new framework is tracked in rust-lang/rustc-guide#564.

`clippy` will need to change the path it uses to import the dataflow analysis traits.
This commit is contained in:
Dylan DPC 2020-03-27 01:23:47 +01:00 committed by GitHub
commit 0f6144a115
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
22 changed files with 89 additions and 1108 deletions

View file

@ -29,12 +29,12 @@ use std::mem;
use std::rc::Rc;
use crate::dataflow;
use crate::dataflow::generic::{Analysis, BorrowckFlowState as Flows, BorrowckResults};
use crate::dataflow::indexes::{BorrowIndex, InitIndex, MoveOutIndex, MovePathIndex};
use crate::dataflow::move_paths::{InitLocation, LookupResult, MoveData, MoveError};
use crate::dataflow::Borrows;
use crate::dataflow::EverInitializedPlaces;
use crate::dataflow::MoveDataParamEnv;
use crate::dataflow::{Analysis, BorrowckFlowState as Flows, BorrowckResults};
use crate::dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use crate::transform::MirSource;
@ -298,7 +298,7 @@ fn do_mir_borrowck<'a, 'tcx>(
mbcx.report_move_errors(errors);
}
dataflow::generic::visit_results(
dataflow::visit_results(
&*body,
traversal::reverse_postorder(&*body).map(|(bb, _)| bb),
&results,
@ -509,7 +509,7 @@ crate struct MirBorrowckCtxt<'cx, 'tcx> {
// 2. loans made in overlapping scopes do not conflict
// 3. assignments do not affect things loaned out as immutable
// 4. moves do not affect things loaned out in any way
impl<'cx, 'tcx> dataflow::generic::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx> {
impl<'cx, 'tcx> dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx> {
type FlowState = Flows<'cx, 'tcx>;
fn visit_statement(

View file

@ -21,9 +21,9 @@ use std::str::FromStr;
use self::mir_util::PassWhere;
use polonius_engine::{Algorithm, Output};
use crate::dataflow::generic::ResultsCursor;
use crate::dataflow::move_paths::{InitKind, InitLocation, MoveData};
use crate::dataflow::MaybeInitializedPlaces;
use crate::dataflow::ResultsCursor;
use crate::transform::MirSource;
use crate::util as mir_util;
use crate::util::pretty;

View file

@ -3,9 +3,9 @@ use rustc::ty::{RegionVid, TyCtxt};
use rustc_data_structures::fx::FxHashSet;
use std::rc::Rc;
use crate::dataflow::generic::ResultsCursor;
use crate::dataflow::move_paths::MoveData;
use crate::dataflow::MaybeInitializedPlaces;
use crate::dataflow::ResultsCursor;
use crate::borrow_check::{
constraints::OutlivesConstraintSet,

View file

@ -8,10 +8,10 @@ use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives;
use rustc_trait_selection::traits::query::type_op::TypeOp;
use std::rc::Rc;
use crate::dataflow::generic::ResultsCursor;
use crate::dataflow::indexes::MovePathIndex;
use crate::dataflow::move_paths::{HasMoveData, MoveData};
use crate::dataflow::MaybeInitializedPlaces;
use crate::dataflow::ResultsCursor;
use crate::borrow_check::{
region_infer::values::{self, PointIndex, RegionValueElements},

View file

@ -39,9 +39,9 @@ use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp;
use rustc_trait_selection::traits::query::{Fallible, NoSolution};
use rustc_trait_selection::traits::{self, ObligationCause, PredicateObligations};
use crate::dataflow::generic::ResultsCursor;
use crate::dataflow::move_paths::MoveData;
use crate::dataflow::MaybeInitializedPlaces;
use crate::dataflow::ResultsCursor;
use crate::transform::promote_consts::should_suggest_const_in_array_repeat_expressions_attribute;
use crate::borrow_check::{

View file

@ -1,169 +0,0 @@
//! A nice wrapper to consume dataflow results at several CFG
//! locations.
use rustc::mir::{BasicBlock, Location};
use rustc_index::bit_set::{BitIter, BitSet, HybridBitSet};
use crate::dataflow::{BitDenotation, DataflowResults, GenKillSet};
use std::borrow::Borrow;
use std::iter;
/// A trait for "cartesian products" of multiple FlowAtLocation.
///
/// There's probably a way to auto-impl this, but I think
/// it is cleaner to have manual visitor impls.
pub trait FlowsAtLocation {
    /// Reset the state bitvector to represent the entry to block `bb`.
    fn reset_to_entry_of(&mut self, bb: BasicBlock);

    /// Reset the state bitvector to represent the exit of the
    /// terminator of block `bb`.
    ///
    /// **Important:** In the case of a `Call` terminator, these
    /// effects do *not* include the result of storing the destination
    /// of the call, since that is edge-dependent (in other words, the
    /// effects don't apply to the unwind edge).
    fn reset_to_exit_of(&mut self, bb: BasicBlock);

    /// Builds gen and kill sets for statement at `loc`.
    ///
    /// Note that invoking this method alone does not change the
    /// `curr_state` -- you must invoke `apply_local_effect`
    /// afterwards.
    fn reconstruct_statement_effect(&mut self, loc: Location);

    /// Builds gen and kill sets for terminator for `loc`.
    ///
    /// Note that invoking this method alone does not change the
    /// `curr_state` -- you must invoke `apply_local_effect`
    /// afterwards.
    fn reconstruct_terminator_effect(&mut self, loc: Location);

    /// Apply current gen + kill sets to `flow_state`.
    ///
    /// (`loc` parameters can be ignored if desired by
    /// client. For the terminator, the `stmt_idx` will be the number
    /// of statements in the block.)
    fn apply_local_effect(&mut self, loc: Location);
}
/// Represents the state of dataflow at a particular
/// CFG location, both before and after it is
/// executed.
///
/// Data flow results are typically computed only at basic block
/// boundaries. A `FlowInProgress` allows you to reconstruct the
/// effects at any point in the control-flow graph by starting with
/// the state at the start of the basic block (`reset_to_entry_of`)
/// and then replaying the effects of statements and terminators
/// (e.g., via `reconstruct_statement_effect` and
/// `reconstruct_terminator_effect`; don't forget to call
/// `apply_local_effect`).
pub struct FlowAtLocation<'tcx, BD, DR = DataflowResults<'tcx, BD>>
where
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
{
    // The per-block fixpoint results being replayed (owned or borrowed,
    // depending on `DR`).
    base_results: DR,
    // The state reconstructed for the location most recently visited.
    curr_state: BitSet<BD::Idx>,
    // Transfer function of the statement/terminator most recently
    // reconstructed; committed to `curr_state` by `apply_local_effect`.
    stmt_trans: GenKillSet<BD::Idx>,
}
impl<'tcx, BD, DR> FlowAtLocation<'tcx, BD, DR>
where
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
{
    /// Iterate over each bit set in the current state.
    pub fn each_state_bit<F>(&self, f: F)
    where
        F: FnMut(BD::Idx),
    {
        self.curr_state.iter().for_each(f)
    }

    /// Iterate over each `gen` bit in the current effect (invoke
    /// `reconstruct_statement_effect` or
    /// `reconstruct_terminator_effect` first).
    pub fn each_gen_bit<F>(&self, f: F)
    where
        F: FnMut(BD::Idx),
    {
        self.stmt_trans.gen_set.iter().for_each(f)
    }

    /// Creates a replayer over `results`, with an empty current state and
    /// an empty statement-level transfer function.
    pub fn new(results: DR) -> Self {
        let bits_per_block = results.borrow().sets().bits_per_block();
        let curr_state = BitSet::new_empty(bits_per_block);
        // `from_elem` clones the empty hybrid set into both the gen and
        // kill halves.
        let stmt_trans = GenKillSet::from_elem(HybridBitSet::new_empty(bits_per_block));
        FlowAtLocation { base_results: results, curr_state, stmt_trans }
    }

    /// Access the underlying operator.
    pub fn operator(&self) -> &BD {
        self.base_results.borrow().operator()
    }

    /// Returns `true` if bit `x` is set in the current state.
    pub fn contains(&self, x: BD::Idx) -> bool {
        self.curr_state.contains(x)
    }

    /// Returns an iterator over the elements present in the current state.
    pub fn iter_incoming(&self) -> iter::Peekable<BitIter<'_, BD::Idx>> {
        self.curr_state.iter().peekable()
    }

    /// Creates a clone of the current state and applies the local
    /// effects to the clone (leaving the state of self intact).
    /// Invokes `f` with an iterator over the resulting state.
    pub fn with_iter_outgoing<F>(&self, f: F)
    where
        F: FnOnce(BitIter<'_, BD::Idx>),
    {
        let mut curr_state = self.curr_state.clone();
        self.stmt_trans.apply(&mut curr_state);
        f(curr_state.iter());
    }

    /// Returns a bitset of the elements present in the current state.
    pub fn as_dense(&self) -> &BitSet<BD::Idx> {
        &self.curr_state
    }
}
impl<'tcx, BD, DR> FlowsAtLocation for FlowAtLocation<'tcx, BD, DR>
where
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
{
    fn reset_to_entry_of(&mut self, bb: BasicBlock) {
        // Overwrite the current state with the precomputed entry set for `bb`.
        self.curr_state.overwrite(self.base_results.borrow().sets().entry_set_for(bb.index()));
    }

    fn reset_to_exit_of(&mut self, bb: BasicBlock) {
        // Exit state = entry state with the whole block's transfer
        // function applied on top.
        self.reset_to_entry_of(bb);
        let trans = self.base_results.borrow().sets().trans_for(bb.index());
        trans.apply(&mut self.curr_state)
    }

    fn reconstruct_statement_effect(&mut self, loc: Location) {
        self.stmt_trans.clear();
        // The "before" effect is applied to `curr_state` right away...
        self.base_results.borrow().operator().before_statement_effect(&mut self.stmt_trans, loc);
        self.stmt_trans.apply(&mut self.curr_state);

        // ...while the main statement effect is accumulated into
        // `stmt_trans` (on top of the before-effect) and only committed
        // when `apply_local_effect` is called.
        self.base_results.borrow().operator().statement_effect(&mut self.stmt_trans, loc);
    }

    fn reconstruct_terminator_effect(&mut self, loc: Location) {
        // Same two-phase scheme as `reconstruct_statement_effect`.
        self.stmt_trans.clear();
        self.base_results.borrow().operator().before_terminator_effect(&mut self.stmt_trans, loc);
        self.stmt_trans.apply(&mut self.curr_state);
        self.base_results.borrow().operator().terminator_effect(&mut self.stmt_trans, loc);
    }

    fn apply_local_effect(&mut self, _loc: Location) {
        self.stmt_trans.apply(&mut self.curr_state)
    }
}

View file

@ -1,26 +1,25 @@
//! A framework that can express both [gen-kill] and generic dataflow problems.
//!
//! There is another interface for dataflow in the compiler in `librustc_mir/dataflow/mod.rs`. The
//! interface in this module will eventually [replace that one][design-meeting].
//! To actually use this framework, you must implement either the `Analysis` or the
//! `GenKillAnalysis` trait. If your transfer function can be expressed with only gen/kill
//! operations, prefer `GenKillAnalysis` since it will run faster while iterating to fixpoint. The
//! `impls` module contains several examples of gen/kill dataflow analyses.
//!
//! To actually use this framework, you must implement either the `Analysis` or the `GenKillAnalysis`
//! trait. If your transfer function can be expressed with only gen/kill operations, prefer
//! `GenKillAnalysis` since it will run faster while iterating to fixpoint. Create an `Engine` using
//! the appropriate constructor and call `iterate_to_fixpoint`. You can use a `ResultsCursor` to
//! inspect the fixpoint solution to your dataflow problem.
//! Create an `Engine` for your analysis using the `into_engine` method on the `Analysis` trait,
//! then call `iterate_to_fixpoint`. From there, you can use a `ResultsCursor` to inspect the
//! fixpoint solution to your dataflow problem, or implement the `ResultsVisitor` interface and use
//! `visit_results`. The following example uses the `ResultsCursor` approach.
//!
//! ```ignore(cross-crate-imports)
//! use rustc_mir::dataflow::Analysis; // Makes `into_engine` available.
//!
//! fn do_my_analysis(tcx: TyCtxt<'tcx>, body: &mir::Body<'tcx>, did: DefId) {
//! let analysis = MyAnalysis::new();
//!
//! // If `MyAnalysis` implements `GenKillAnalysis`.
//! let results = Engine::new_gen_kill(tcx, body, did, analysis).iterate_to_fixpoint();
//!
//! // If `MyAnalysis` implements `Analysis`.
//! // let results = Engine::new_generic(tcx, body, did, analysis).iterate_to_fixpoint();
//!
//! let mut cursor = ResultsCursor::new(body, results);
//! let analysis = MyAnalysis::new()
//! .into_engine(tcx, body, did)
//! .iterate_to_fixpoint()
//! .into_results_cursor(body);
//!
//! // Print the dataflow state *after* each statement in the start block.
//! for (_, statement_index) in body.block_data[START_BLOCK].statements.iter_enumerated() {
//! cursor.seek_after(Location { block: START_BLOCK, statement_index });
//! let state = cursor.get();
@ -30,7 +29,6 @@
//! ```
//!
//! [gen-kill]: https://en.wikipedia.org/wiki/Data-flow_analysis#Bit_vector_problems
//! [design-meeting]: https://github.com/rust-lang/compiler-team/issues/202
use std::io;
@ -41,8 +39,6 @@ use rustc_hir::def_id::DefId;
use rustc_index::bit_set::{BitSet, HybridBitSet};
use rustc_index::vec::{Idx, IndexVec};
use crate::dataflow::BottomValue;
mod cursor;
mod engine;
mod graphviz;
@ -95,6 +91,47 @@ where
}
}
/// Parameterization for the precise form of data flow that is used.
///
/// `BottomValue` determines whether the initial entry set for each basic block is empty or full.
/// This also determines the semantics of the lattice `join` operator used to merge dataflow
/// results, since dataflow works by starting at the bottom and moving monotonically to a fixed
/// point.
///
/// This means, for propagation across the graph, that you either want to start at all-zeroes and
/// then use Union as your merge when propagating, or you start at all-ones and then use Intersect
/// as your merge when propagating.
pub trait BottomValue {
    /// Specifies the initial value for each bit in the entry set for each basic block.
    const BOTTOM_VALUE: bool;

    /// Merges `in_set` into `inout_set`, returning `true` if `inout_set` changed.
    ///
    /// It is almost certainly wrong to override this, since it automatically applies
    /// * `inout_set & in_set` if `BOTTOM_VALUE == true`
    /// * `inout_set | in_set` if `BOTTOM_VALUE == false`
    ///
    /// This means that if a bit is not `BOTTOM_VALUE`, it is propagated into all target blocks.
    /// For clarity, the above statement again from a different perspective:
    /// A bit in the block's entry set is `!BOTTOM_VALUE` if *any* predecessor block's bit value is
    /// `!BOTTOM_VALUE`.
    ///
    /// There are situations where you want the opposite behaviour: propagate only if *all*
    /// predecessor blocks' values are `!BOTTOM_VALUE`.
    /// E.g. if you want to know whether a bit is *definitely* set at a specific location. This
    /// means that all code paths leading to the location must have set the bit, instead of any
    /// code path leading there.
    ///
    /// If you want this kind of "definitely set" analysis, you need to
    /// 1. Invert `BOTTOM_VALUE`
    /// 2. Reset the `entry_set` in `start_block_effect` to `!BOTTOM_VALUE`
    /// 3. Override `join` to do the opposite from what it's doing now.
    #[inline]
    fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
        if !Self::BOTTOM_VALUE { inout_set.union(in_set) } else { inout_set.intersect(in_set) }
    }
}
/// Define the domain of a dataflow problem.
///
/// This trait specifies the lattice on which this analysis operates. For now, this must be a

View file

@ -1,6 +1,6 @@
pub use super::*;
use crate::dataflow::generic::{AnalysisDomain, GenKill, GenKillAnalysis};
use crate::dataflow::{AnalysisDomain, GenKill, GenKillAnalysis};
use rustc::mir::visit::Visitor;
use rustc::mir::*;
use rustc::ty::{ParamEnv, TyCtxt};

View file

@ -8,8 +8,8 @@ use rustc_index::bit_set::BitSet;
use crate::borrow_check::{
places_conflict, BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, ToRegionVid,
};
use crate::dataflow::generic::{self, GenKill};
use crate::dataflow::BottomValue;
use crate::dataflow::{self, GenKill};
use std::rc::Rc;
@ -226,7 +226,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
}
}
impl<'tcx> generic::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
impl<'tcx> dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
type Idx = BorrowIndex;
const NAME: &'static str = "borrows";
@ -245,7 +245,7 @@ impl<'tcx> generic::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
}
}
impl<'tcx> generic::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
impl<'tcx> dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
fn before_statement_effect(
&self,
trans: &mut impl GenKill<Self::Idx>,

View file

@ -12,9 +12,8 @@ use super::MoveDataParamEnv;
use crate::util::elaborate_drops::DropFlagState;
use super::generic::{AnalysisDomain, GenKill, GenKillAnalysis};
use super::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use super::BottomValue;
use super::{AnalysisDomain, BottomValue, GenKill, GenKillAnalysis};
use super::drop_flag_effects_for_function_entry;
use super::drop_flag_effects_for_location;

View file

@ -1,7 +1,7 @@
pub use super::*;
use crate::dataflow::generic::{self as dataflow, GenKill, Results, ResultsRefCursor};
use crate::dataflow::BottomValue;
use crate::dataflow::{self, GenKill, Results, ResultsRefCursor};
use rustc::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
use rustc::mir::*;
use std::cell::RefCell;

View file

@ -1,35 +1,22 @@
use rustc::mir::traversal;
use rustc::mir::{self, BasicBlock, BasicBlockData, Body, Location, Statement, Terminator};
use rustc::ty::{self, TyCtxt};
use rustc::ty;
use rustc_ast::ast::{self, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::work_queue::WorkQueue;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::{BitSet, HybridBitSet};
use rustc_index::vec::Idx;
use rustc_session::Session;
use rustc_span::symbol::{sym, Symbol};
use std::borrow::Borrow;
use std::fmt;
use std::io;
use std::path::PathBuf;
pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
pub(crate) use self::drop_flag_effects::*;
pub use self::impls::borrows::Borrows;
pub use self::impls::DefinitelyInitializedPlaces;
pub use self::impls::EverInitializedPlaces;
pub use self::impls::{MaybeBorrowedLocals, MaybeMutBorrowedLocals};
pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
pub use self::impls::{MaybeRequiresStorage, MaybeStorageLive};
pub use self::framework::{
visit_results, Analysis, AnalysisDomain, BorrowckFlowState, BorrowckResults, BottomValue,
Engine, GenKill, GenKillAnalysis, Results, ResultsCursor, ResultsRefCursor, ResultsVisitor,
};
pub use self::impls::{
borrows::Borrows, DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeBorrowedLocals,
MaybeInitializedPlaces, MaybeMutBorrowedLocals, MaybeRequiresStorage, MaybeStorageLive,
MaybeUninitializedPlaces,
};
use self::move_paths::MoveData;
mod at_location;
pub mod drop_flag_effects;
pub mod generic;
mod graphviz;
mod framework;
mod impls;
pub mod move_paths;
@ -40,74 +27,9 @@ pub(crate) mod indexes {
};
}
/// Couples a `DataflowAnalysis` with the optional graphviz output paths
/// used to render the state before and after propagation.
pub(crate) struct DataflowBuilder<'a, 'tcx, BD>
where
    BD: BitDenotation<'tcx>,
{
    // The `DefId` of the MIR body being analyzed.
    def_id: DefId,
    flow_state: DataflowAnalysis<'a, 'tcx, BD>,
    // Where to dump the graphviz rendering before propagation, if anywhere.
    print_preflow_to: Option<String>,
    // Where to dump the graphviz rendering after propagation, if anywhere.
    print_postflow_to: Option<String>,
}
/// `DebugFormatted` encapsulates the "{:?}" rendering of some
/// arbitrary value. This way: you pay cost of allocating an extra
/// string (as well as that of rendering up-front); in exchange, you
/// don't have to hand over ownership of your value or deal with
/// borrowing it.
pub struct DebugFormatted(String);

impl DebugFormatted {
    /// Eagerly captures the `Debug` rendering of `input` as an owned string.
    pub fn new(input: &dyn fmt::Debug) -> DebugFormatted {
        let rendered = format!("{:?}", input);
        DebugFormatted(rendered)
    }
}

impl fmt::Debug for DebugFormatted {
    /// Emits the pre-rendered text verbatim (no extra quoting or escaping).
    fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
        w.write_str(&self.0)
    }
}
/// Driver interface for a dataflow problem: build the per-block transfer
/// functions, then iterate to a fixed point.
pub trait Dataflow<'tcx, BD: BitDenotation<'tcx>> {
    /// Sets up and runs the dataflow problem, using `p` to render results if
    /// implementation so chooses.
    fn dataflow<P>(&mut self, p: P)
    where
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
    {
        let _ = p; // default implementation does not instrument process.
        self.build_sets();
        self.propagate();
    }

    /// Sets up the entry, gen, and kill sets for this instance of a dataflow problem.
    fn build_sets(&mut self);

    /// Finds a fixed-point solution to this instance of a dataflow problem.
    fn propagate(&mut self);
}
impl<'a, 'tcx, BD> Dataflow<'tcx, BD> for DataflowBuilder<'a, 'tcx, BD>
where
BD: BitDenotation<'tcx>,
{
fn dataflow<P>(&mut self, p: P)
where
P: Fn(&BD, BD::Idx) -> DebugFormatted,
{
self.flow_state.build_sets();
self.pre_dataflow_instrumentation(|c, i| p(c, i)).unwrap();
self.flow_state.propagate();
self.post_dataflow_instrumentation(|c, i| p(c, i)).unwrap();
}
fn build_sets(&mut self) {
self.flow_state.build_sets();
}
fn propagate(&mut self) {
self.flow_state.propagate();
}
pub struct MoveDataParamEnv<'tcx> {
pub(crate) move_data: MoveData<'tcx>,
pub(crate) param_env: ty::ParamEnv<'tcx>,
}
pub(crate) fn has_rustc_mir_with(attrs: &[ast::Attribute], name: Symbol) -> Option<MetaItem> {
@ -124,811 +46,3 @@ pub(crate) fn has_rustc_mir_with(attrs: &[ast::Attribute], name: Symbol) -> Opti
}
None
}
/// Bundles a body's `MoveData` together with a `ParamEnv`, since the two
/// are threaded through the move-path analyses as a pair.
pub struct MoveDataParamEnv<'tcx> {
    pub(crate) move_data: MoveData<'tcx>,
    pub(crate) param_env: ty::ParamEnv<'tcx>,
}
/// Convenience entry point: constructs a `DataflowAnalysis` for `bd` and
/// runs it to a fixed point, returning the results.
///
/// `attributes` is scanned (in `run`) for `rustc_mir` graphviz-output
/// directives; `p` renders individual bits for that output.
pub fn do_dataflow<'a, 'tcx, BD, P>(
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    def_id: DefId,
    attributes: &[ast::Attribute],
    dead_unwinds: &BitSet<BasicBlock>,
    bd: BD,
    p: P,
) -> DataflowResults<'tcx, BD>
where
    BD: BitDenotation<'tcx>,
    P: Fn(&BD, BD::Idx) -> DebugFormatted,
{
    let flow_state = DataflowAnalysis::new(body, dead_unwinds, bd);
    flow_state.run(tcx, def_id, attributes, p)
}
impl<'a, 'tcx, BD> DataflowAnalysis<'a, 'tcx, BD>
where
    BD: BitDenotation<'tcx>,
{
    /// Runs the analysis to a fixed point and extracts the results,
    /// optionally dumping graphviz renderings before and after propagation
    /// when the corresponding `rustc_mir` attributes name an output path.
    pub(crate) fn run<P>(
        self,
        tcx: TyCtxt<'tcx>,
        def_id: DefId,
        attributes: &[ast::Attribute],
        p: P,
    ) -> DataflowResults<'tcx, BD>
    where
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
    {
        // Extracts the string value of the named `rustc_mir` sub-attribute,
        // reporting an error if the attribute is present without a value.
        let name_found = |sess: &Session, attrs: &[ast::Attribute], name| -> Option<String> {
            if let Some(item) = has_rustc_mir_with(attrs, name) {
                if let Some(s) = item.value_str() {
                    return Some(s.to_string());
                } else {
                    let path = pprust::path_to_string(&item.path);
                    sess.span_err(item.span, &format!("{} attribute requires a path", path));
                    return None;
                }
            }
            None
        };

        let print_preflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_preflow);
        let print_postflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_postflow);

        let mut mbcx =
            DataflowBuilder { def_id, print_preflow_to, print_postflow_to, flow_state: self };

        mbcx.dataflow(p);
        mbcx.flow_state.results()
    }
}
/// Borrows a `DataflowAnalysis` mutably for the duration of the
/// fixpoint iteration (`walk_cfg`).
struct PropagationContext<'b, 'a, 'tcx, O>
where
    O: BitDenotation<'tcx>,
{
    builder: &'b mut DataflowAnalysis<'a, 'tcx, O>,
}
impl<'a, 'tcx, BD> DataflowAnalysis<'a, 'tcx, BD>
where
    BD: BitDenotation<'tcx>,
{
    /// Iterates the analysis to a fixed point.
    fn propagate(&mut self) {
        // Scratch bitset reused across every block visit in `walk_cfg`.
        let mut temp = BitSet::new_empty(self.flow_state.sets.bits_per_block);
        let mut propcx = PropagationContext { builder: self };
        propcx.walk_cfg(&mut temp);
    }

    /// Precomputes the gen/kill transfer function of every block and the
    /// entry state of the start block.
    fn build_sets(&mut self) {
        // Build the transfer function for each block.
        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
            let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data;

            let trans = self.flow_state.sets.trans_mut_for(bb.index());
            for j_stmt in 0..statements.len() {
                let location = Location { block: bb, statement_index: j_stmt };
                self.flow_state.operator.before_statement_effect(trans, location);
                self.flow_state.operator.statement_effect(trans, location);
            }

            if terminator.is_some() {
                // By convention, the terminator's location is one past the
                // last statement.
                let location = Location { block: bb, statement_index: statements.len() };
                self.flow_state.operator.before_terminator_effect(trans, location);
                self.flow_state.operator.terminator_effect(trans, location);
            }
        }

        // Initialize the flow state at entry to the start block.
        let on_entry = self.flow_state.sets.entry_set_mut_for(mir::START_BLOCK.index());
        self.flow_state.operator.start_block_effect(on_entry);
    }
}
impl<'b, 'a, 'tcx, BD> PropagationContext<'b, 'a, 'tcx, BD>
where
    BD: BitDenotation<'tcx>,
{
    /// Worklist-driven fixpoint loop: repeatedly pops a dirty block,
    /// computes its exit state into the scratch set `in_out`, and hands
    /// that state to `propagate_bits_into_graph_successors_of` (which is
    /// expected to re-enqueue successors whose entry sets change — its
    /// body is not visible here).
    fn walk_cfg(&mut self, in_out: &mut BitSet<BD::Idx>) {
        let body = self.builder.body;

        // Initialize the dirty queue in reverse post-order. This makes it more likely that the
        // entry state for each basic block will have the effects of its predecessors applied
        // before it is processed. In fact, for CFGs without back edges, this guarantees that
        // dataflow will converge in exactly `N` iterations, where `N` is the number of basic
        // blocks.
        let mut dirty_queue: WorkQueue<mir::BasicBlock> =
            WorkQueue::with_none(body.basic_blocks().len());
        for (bb, _) in traversal::reverse_postorder(body) {
            dirty_queue.insert(bb);
        }

        // Add blocks which are not reachable from START_BLOCK to the work queue. These blocks will
        // be processed after the ones added above.
        for bb in body.basic_blocks().indices() {
            dirty_queue.insert(bb);
        }

        while let Some(bb) = dirty_queue.pop() {
            let (on_entry, trans) = self.builder.flow_state.sets.get_mut(bb.index());
            debug_assert!(in_out.words().len() == on_entry.words().len());
            // Exit state of `bb` = entry state + block transfer function.
            in_out.overwrite(on_entry);
            trans.apply(in_out);

            let bb_data = &body[bb];
            self.builder.propagate_bits_into_graph_successors_of(
                in_out,
                (bb, bb_data),
                &mut dirty_queue,
            );
        }
    }
}
/// Prepends `context` (plus an underscore) to the file name of `path`.
///
/// E.g. `dataflow_path("borrows", "/tmp/out.dot")` yields `/tmp/borrows_out.dot`,
/// letting several analyses share one configured output path.
///
/// Panics if `path` has no final component or it is not valid UTF-8.
fn dataflow_path(context: &str, path: &str) -> PathBuf {
    let mut buf = PathBuf::from(path);
    let original = buf.file_name().unwrap().to_str().unwrap();
    let renamed = format!("{}_{}", context, original);
    buf.set_file_name(renamed);
    buf
}
impl<'a, 'tcx, BD> DataflowBuilder<'a, 'tcx, BD>
where
    BD: BitDenotation<'tcx>,
{
    /// Dumps a graphviz rendering of the analysis state *before*
    /// propagation, if a pre-flow output path was configured; otherwise a
    /// no-op. `p` renders individual bits.
    fn pre_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
    where
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
    {
        if let Some(ref path_str) = self.print_preflow_to {
            // Prefix the file name with the analysis name so several
            // analyses can share one configured path.
            let path = dataflow_path(BD::name(), path_str);
            graphviz::print_borrowck_graph_to(self, &path, p)
        } else {
            Ok(())
        }
    }

    /// Dumps a graphviz rendering of the analysis state *after*
    /// propagation, if a post-flow output path was configured; otherwise a
    /// no-op.
    fn post_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
    where
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
    {
        if let Some(ref path_str) = self.print_postflow_to {
            let path = dataflow_path(BD::name(), path_str);
            graphviz::print_borrowck_graph_to(self, &path, p)
        } else {
            Ok(())
        }
    }
}
/// DataflowResultsConsumer abstracts over walking the MIR with some
/// already constructed dataflow results.
///
/// It abstracts over the FlowState and also completely hides the
/// underlying flow analysis results, because it needs to handle cases
/// where we are combining the results of *multiple* flow analyses
/// (e.g., borrows + inits + uninits).
pub(crate) trait DataflowResultsConsumer<'a, 'tcx: 'a> {
    type FlowState: FlowsAtLocation;

    // Observation Hooks: override (at least one of) these to get analysis feedback.
    fn visit_block_entry(&mut self, _bb: BasicBlock, _flow_state: &Self::FlowState) {}

    fn visit_statement_entry(
        &mut self,
        _loc: Location,
        _stmt: &'a Statement<'tcx>,
        _flow_state: &Self::FlowState,
    ) {
    }

    fn visit_terminator_entry(
        &mut self,
        _loc: Location,
        _term: &'a Terminator<'tcx>,
        _flow_state: &Self::FlowState,
    ) {
    }

    // Main entry point: this drives the processing of results.
    // Walks the blocks in reverse post-order, resetting `flow_uninit` to
    // each block's entry state before replaying the block.
    fn analyze_results(&mut self, flow_uninit: &mut Self::FlowState) {
        let flow = flow_uninit;
        for (bb, _) in traversal::reverse_postorder(self.body()) {
            flow.reset_to_entry_of(bb);
            self.process_basic_block(bb, flow);
        }
    }

    fn process_basic_block(&mut self, bb: BasicBlock, flow_state: &mut Self::FlowState) {
        self.visit_block_entry(bb, flow_state);

        let BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = self.body()[bb];
        let mut location = Location { block: bb, statement_index: 0 };
        for stmt in statements.iter() {
            // Reconstruct the statement's effect, let the visitor observe
            // the state on entry to the statement, then commit the effect.
            flow_state.reconstruct_statement_effect(location);
            self.visit_statement_entry(location, stmt, flow_state);
            flow_state.apply_local_effect(location);
            location.statement_index += 1;
        }

        if let Some(ref term) = *terminator {
            flow_state.reconstruct_terminator_effect(location);
            self.visit_terminator_entry(location, term, flow_state);

            // We don't need to apply the effect of the terminator,
            // since we are only visiting dataflow state on control
            // flow entry to the various nodes. (But we still need to
            // reconstruct the effect, because the visit method might
            // inspect it.)
        }
    }

    // Delegated Hooks: Provide access to the MIR and process the flow state.
    fn body(&self) -> &'a Body<'tcx>;
}
/// Allows iterating dataflow results in a flexible and reasonably fast way.
pub struct DataflowResultsCursor<'mir, 'tcx, BD, DR = DataflowResults<'tcx, BD>>
where
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
{
    // Replays per-block results statement by statement.
    flow_state: FlowAtLocation<'tcx, BD, DR>,

    // The statement (or terminator) whose effect has been reconstructed in
    // flow_state.
    curr_loc: Option<Location>,

    // The body being analyzed; needed to know each block's statement count.
    body: &'mir Body<'tcx>,
}

/// A cursor over results that are owned elsewhere and only borrowed here.
pub type DataflowResultsRefCursor<'mir, 'tcx, BD> =
    DataflowResultsCursor<'mir, 'tcx, BD, &'mir DataflowResults<'tcx, BD>>;
impl<'mir, 'tcx, BD, DR> DataflowResultsCursor<'mir, 'tcx, BD, DR>
where
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
{
    /// Creates a cursor with no current location; the first `seek` resets
    /// to the entry of whichever block it targets.
    pub fn new(result: DR, body: &'mir Body<'tcx>) -> Self {
        DataflowResultsCursor { flow_state: FlowAtLocation::new(result), curr_loc: None, body }
    }

    /// Seek to the given location in MIR. This method is fast if you are
    /// traversing your MIR statements in order.
    ///
    /// After calling `seek`, the current state will reflect all effects up to
    /// and including the `before_statement_effect` of the statement at location
    /// `loc`. The `statement_effect` of the statement at `loc` will be
    /// available as the current effect (see e.g. `each_gen_bit`).
    ///
    /// If `loc.statement_index` equals the number of statements in the block,
    /// we will reconstruct the terminator effect in the same way as described
    /// above.
    pub fn seek(&mut self, loc: Location) {
        // Already at `loc`: nothing to do.
        if self.curr_loc.map(|cur| loc == cur).unwrap_or(false) {
            return;
        }

        let start_index;

        // Effects can only be replayed forwards within a block, so a block
        // change or a backwards seek forces a reset to the block's entry.
        let should_reset = match self.curr_loc {
            None => true,
            Some(cur) if loc.block != cur.block || loc.statement_index < cur.statement_index => {
                true
            }
            _ => false,
        };

        if should_reset {
            self.flow_state.reset_to_entry_of(loc.block);
            start_index = 0;
        } else {
            let curr_loc = self.curr_loc.unwrap();
            start_index = curr_loc.statement_index;

            // Apply the effect from the last seek to the current state.
            self.flow_state.apply_local_effect(curr_loc);
        }

        // Replay (and commit) every statement strictly before `loc`...
        for stmt in start_index..loc.statement_index {
            let mut stmt_loc = loc;
            stmt_loc.statement_index = stmt;
            self.flow_state.reconstruct_statement_effect(stmt_loc);
            self.flow_state.apply_local_effect(stmt_loc);
        }

        // ...then reconstruct, but do not commit, the effect at `loc`
        // itself (committing happens lazily on the next forward seek).
        if loc.statement_index == self.body[loc.block].statements.len() {
            self.flow_state.reconstruct_terminator_effect(loc);
        } else {
            self.flow_state.reconstruct_statement_effect(loc);
        }
        self.curr_loc = Some(loc);
    }

    /// Return whether the current state contains bit `x`.
    pub fn contains(&self, x: BD::Idx) -> bool {
        self.flow_state.contains(x)
    }

    /// Iterate over each `gen` bit in the current effect (invoke `seek` first).
    pub fn each_gen_bit<F>(&self, f: F)
    where
        F: FnMut(BD::Idx),
    {
        self.flow_state.each_gen_bit(f)
    }

    /// The dense bitset of the current state (see `seek` for what it reflects).
    pub fn get(&self) -> &BitSet<BD::Idx> {
        self.flow_state.as_dense()
    }
}
/// Drives a single gen/kill dataflow analysis over one MIR body.
pub struct DataflowAnalysis<'a, 'tcx, O>
where
    O: BitDenotation<'tcx>,
{
    // Entry sets, per-block transfer functions, and the operator itself.
    flow_state: DataflowState<'tcx, O>,
    // NOTE(review): presumably blocks whose unwind edges should be ignored
    // during propagation — confirm at the (not visible) use sites.
    dead_unwinds: &'a BitSet<mir::BasicBlock>,
    body: &'a Body<'tcx>,
}
impl<'a, 'tcx, O> DataflowAnalysis<'a, 'tcx, O>
where
    O: BitDenotation<'tcx>,
{
    /// Consumes the analysis, yielding its computed state as results.
    pub fn results(self) -> DataflowResults<'tcx, O> {
        DataflowResults(self.flow_state)
    }

    /// The MIR body this analysis runs over.
    pub fn body(&self) -> &'a Body<'tcx> {
        self.body
    }
}
/// The product of running a dataflow analysis: its `DataflowState` after
/// `results()` was called.
pub struct DataflowResults<'tcx, O>(pub(crate) DataflowState<'tcx, O>)
where
    O: BitDenotation<'tcx>;

impl<'tcx, O: BitDenotation<'tcx>> DataflowResults<'tcx, O> {
    /// The entry sets and per-block transfer functions.
    pub fn sets(&self) -> &AllSets<O::Idx> {
        &self.0.sets
    }

    /// The transfer-function operator that produced these results.
    pub fn operator(&self) -> &O {
        &self.0.operator
    }
}
/// State of a dataflow analysis; couples a collection of bit sets
/// with operator used to initialize and merge bits during analysis.
pub struct DataflowState<'tcx, O: BitDenotation<'tcx>> {
    /// All the sets for the analysis. (Factored into its
    /// own structure so that we can borrow it mutably
    /// on its own separate from other fields.)
    pub sets: AllSets<O::Idx>,

    /// operator used to initialize, combine, and interpret bits.
    pub(crate) operator: O,
}

impl<'tcx, O: BitDenotation<'tcx>> DataflowState<'tcx, O> {
    /// Maps every bit of `set` through `render_idx`, collecting the
    /// renderings (the callers are not visible here; presumably used for
    /// debug output).
    pub(crate) fn interpret_set<'c, P>(
        &self,
        o: &'c O,
        set: &BitSet<O::Idx>,
        render_idx: &P,
    ) -> Vec<DebugFormatted>
    where
        P: Fn(&O, O::Idx) -> DebugFormatted,
    {
        set.iter().map(|i| render_idx(o, i)).collect()
    }

    /// Like `interpret_set`, but for hybrid (sparse-or-dense) bit sets.
    pub(crate) fn interpret_hybrid_set<'c, P>(
        &self,
        o: &'c O,
        set: &HybridBitSet<O::Idx>,
        render_idx: &P,
    ) -> Vec<DebugFormatted>
    where
        P: Fn(&O, O::Idx) -> DebugFormatted,
    {
        set.iter().map(|i| render_idx(o, i)).collect()
    }
}
/// A 2-tuple representing the "gen" and "kill" bitsets during
/// dataflow analysis.
///
/// It is best to ensure that the intersection of `gen_set` and
/// `kill_set` is empty; otherwise the results of the dataflow will
/// have a hidden dependency on what order the bits are generated and
/// killed during the iteration. (This is such a good idea that the
/// `fn gen` and `fn kill` methods that set their state enforce this
/// for you.)
#[derive(Debug, Clone, Copy)]
pub struct GenKill<T> {
    /// Bits set ("generated") by the transfer function.
    pub(crate) gen_set: T,
    /// Bits cleared ("killed") by the transfer function.
    pub(crate) kill_set: T,
}

/// Convenience alias: the gen/kill pair actually used for per-block
/// transfer functions, backed by `HybridBitSet` storage.
pub type GenKillSet<T> = GenKill<HybridBitSet<T>>;
impl<T> GenKill<T> {
    /// Creates a gen/kill pair whose two sets both start out equal to
    /// `elem` (i.e. `gen_set == kill_set == elem`).
    pub(crate) fn from_elem(elem: T) -> Self
    where
        T: Clone,
    {
        let gen_set = elem.clone();
        let kill_set = elem;
        GenKill { gen_set, kill_set }
    }
}
impl<E: Idx> GenKillSet<E> {
    /// Resets both the gen and kill sets to empty.
    pub fn clear(&mut self) {
        self.gen_set.clear();
        self.kill_set.clear();
    }

    /// Marks `e` as generated: inserts it into the gen set and makes
    /// sure it is absent from the kill set (the two stay disjoint).
    pub fn gen(&mut self, e: E) {
        self.gen_set.insert(e);
        self.kill_set.remove(e);
    }

    /// `gen`s every element yielded by `i`.
    pub fn gen_all(&mut self, i: impl IntoIterator<Item: Borrow<E>>) {
        i.into_iter().for_each(|e| self.gen(*e.borrow()));
    }

    /// Marks `e` as killed: inserts it into the kill set and makes
    /// sure it is absent from the gen set (the two stay disjoint).
    pub fn kill(&mut self, e: E) {
        self.gen_set.remove(e);
        self.kill_set.insert(e);
    }

    /// `kill`s every element yielded by `i`.
    pub fn kill_all(&mut self, i: impl IntoIterator<Item: Borrow<E>>) {
        i.into_iter().for_each(|e| self.kill(*e.borrow()));
    }

    /// Computes `(set ∪ gen) - kill` and assigns the result to `set`.
    pub(crate) fn apply(&self, set: &mut BitSet<E>) {
        set.union(&self.gen_set);
        set.subtract(&self.kill_set);
    }
}
/// All the per-block bit sets for one dataflow analysis: the entry
/// set and the transfer function of every basic block.
#[derive(Debug)]
pub struct AllSets<E: Idx> {
    /// Analysis bitwidth for each block.
    bits_per_block: usize,

    /// For each block, bits valid on entry to the block.
    on_entry: Vec<BitSet<E>>,

    /// The transfer function of each block expressed as the set of bits
    /// generated and killed by executing the statements + terminator in the
    /// block -- with one caveat. In particular, for *call terminators*, the
    /// effect of storing the destination is not included, since that only takes
    /// effect on the **success** edge (and not the unwind edge).
    trans: Vec<GenKillSet<E>>,
}
impl<E: Idx> AllSets<E> {
pub fn bits_per_block(&self) -> usize {
self.bits_per_block
}
pub fn get_mut(&mut self, block_idx: usize) -> (&mut BitSet<E>, &mut GenKillSet<E>) {
(&mut self.on_entry[block_idx], &mut self.trans[block_idx])
}
pub fn trans_for(&self, block_idx: usize) -> &GenKillSet<E> {
&self.trans[block_idx]
}
pub fn trans_mut_for(&mut self, block_idx: usize) -> &mut GenKillSet<E> {
&mut self.trans[block_idx]
}
pub fn entry_set_for(&self, block_idx: usize) -> &BitSet<E> {
&self.on_entry[block_idx]
}
pub fn entry_set_mut_for(&mut self, block_idx: usize) -> &mut BitSet<E> {
&mut self.on_entry[block_idx]
}
pub fn gen_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
&self.trans_for(block_idx).gen_set
}
pub fn kill_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
&self.trans_for(block_idx).kill_set
}
}
/// Parameterization for the precise form of data flow that is used.
///
/// `BottomValue` determines whether the initial entry set for each basic block is empty or full.
/// This also determines the semantics of the lattice `join` operator used to merge dataflow
/// results, since dataflow works by starting at the bottom and moving monotonically to a fixed
/// point.
///
/// This means, for propagation across the graph, that you either want to start at all-zeroes and
/// then use Union as your merge when propagating, or you start at all-ones and then use Intersect
/// as your merge when propagating.
pub trait BottomValue {
    /// Specifies the initial value for each bit in the entry set for each basic block.
    const BOTTOM_VALUE: bool;

    /// Merges `in_set` into `inout_set`, returning `true` if `inout_set` changed.
    ///
    /// It is almost certainly wrong to override this, since it automatically applies
    /// * `inout_set & in_set` if `BOTTOM_VALUE == true`
    /// * `inout_set | in_set` if `BOTTOM_VALUE == false`
    ///
    /// In other words: a bit in a block's entry set ends up `!BOTTOM_VALUE` whenever *any*
    /// predecessor block's corresponding bit is `!BOTTOM_VALUE`.
    ///
    /// There are situations where you want the opposite behaviour: propagate only if *all*
    /// predecessor blocks' values are `!BOTTOM_VALUE`. For example, to know whether a bit is
    /// *definitely* set at a specific location, every code path leading to the location must
    /// have set the bit, not just some path.
    ///
    /// If you want this kind of "definitely set" analysis, you need to
    /// 1. Invert `BOTTOM_VALUE`
    /// 2. Reset the `entry_set` in `start_block_effect` to `!BOTTOM_VALUE`
    /// 3. Override `join` to do the opposite from what it's doing now.
    #[inline]
    fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
        if Self::BOTTOM_VALUE {
            inout_set.intersect(in_set)
        } else {
            inout_set.union(in_set)
        }
    }
}
/// A specific flavor of dataflow analysis.
///
/// To run a dataflow analysis, one sets up an initial state for the
/// `START_BLOCK` via `start_block_effect` and a transfer function (`trans`)
/// for each block individually. The entry set for all other basic blocks is
/// initialized to `Self::BOTTOM_VALUE`. The dataflow analysis then
/// iteratively modifies the various entry sets (but leaves the transfer
/// function unchanged). `BottomValue::join` is used to merge the bitsets from
/// two blocks (e.g. when two blocks' terminator jumps to a single block, that
/// target block's state is the merged state of both incoming blocks).
pub trait BitDenotation<'tcx>: BottomValue {
    /// Specifies what index type is used to access the bitvector.
    type Idx: Idx;

    /// A name describing the dataflow analysis that this
    /// `BitDenotation` is supporting. The name should be something
    /// suitable for plugging in as part of a filename (i.e., avoid
    /// space-characters or other things that tend to look bad on a
    /// file system, like slashes or periods). It is also better for
    /// the name to be reasonably short, again because it will be
    /// plugged into a filename.
    fn name() -> &'static str;

    /// Size of each bitvector allocated for each block in the analysis.
    fn bits_per_block(&self) -> usize;

    /// Mutates the entry set according to the effects that
    /// have been established *prior* to entering the start
    /// block. This can't access the gen/kill sets, because
    /// these won't be accounted for correctly.
    ///
    /// (For example, establishing the call arguments.)
    fn start_block_effect(&self, entry_set: &mut BitSet<Self::Idx>);

    /// Similar to `statement_effect`, except it applies
    /// *just before* the statement rather than *just after* it.
    ///
    /// This matters for "dataflow at location" APIs, because the
    /// before-statement effect is visible while visiting the
    /// statement, while the after-statement effect only becomes
    /// visible at the next statement.
    ///
    /// Both the before-statement and after-statement effects are
    /// applied, in that order, before moving on to the next
    /// statement.
    fn before_statement_effect(&self, _trans: &mut GenKillSet<Self::Idx>, _location: Location) {}

    /// Mutates the block-sets (the flow sets for the given
    /// basic block) according to the effects of evaluating statement.
    ///
    /// This is used, in particular, for building up the
    /// "transfer-function" representing the overall-effect of the
    /// block, represented via GEN and KILL sets.
    ///
    /// The statement is identified as `bb_data[idx_stmt]`, where
    /// `bb_data` is the sequence of statements identified by `bb` in
    /// the MIR.
    fn statement_effect(&self, trans: &mut GenKillSet<Self::Idx>, location: Location);

    /// Similar to `terminator_effect`, except it applies
    /// *just before* the terminator rather than *just after* it.
    ///
    /// This matters for "dataflow at location" APIs, because the
    /// before-terminator effect is visible while visiting the
    /// terminator, while the after-terminator effect only becomes
    /// visible at the terminator's successors.
    ///
    /// Both the before-terminator and after-terminator effects are
    /// applied, in that order, before moving on to the next
    /// terminator.
    fn before_terminator_effect(&self, _trans: &mut GenKillSet<Self::Idx>, _location: Location) {}

    /// Mutates the block-sets (the flow sets for the given
    /// basic block) according to the effects of evaluating
    /// the terminator.
    ///
    /// This is used, in particular, for building up the
    /// "transfer-function" representing the overall-effect of the
    /// block, represented via GEN and KILL sets.
    ///
    /// The effects applied here cannot depend on which branch the
    /// terminator took.
    fn terminator_effect(&self, trans: &mut GenKillSet<Self::Idx>, location: Location);

    /// Mutates the block-sets according to the (flow-dependent)
    /// effect of a successful return from a Call terminator.
    ///
    /// If basic-block BB_x ends with a call-instruction that, upon
    /// successful return, flows to BB_y, then this method will be
    /// called on the exit flow-state of BB_x in order to set up the
    /// entry flow-state of BB_y.
    ///
    /// This is used, in particular, as a special case during the
    /// "propagate" loop where all of the basic blocks are repeatedly
    /// visited. Since the effects of a Call terminator are
    /// flow-dependent, the current MIR cannot encode them via just
    /// GEN and KILL sets attached to the block, and so instead we add
    /// this extra machinery to represent the flow-dependent effect.
    //
    // FIXME: right now this is a bit of a wart in the API. It might
    // be better to represent this as an additional gen- and
    // kill-sets associated with each edge coming out of the basic
    // block.
    fn propagate_call_return(
        &self,
        in_out: &mut BitSet<Self::Idx>,
        call_bb: mir::BasicBlock,
        dest_bb: mir::BasicBlock,
        dest_place: &mir::Place<'tcx>,
    );
}
impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D>
where
    D: BitDenotation<'tcx>,
{
    /// Allocates the initial state for running the analysis `denotation`
    /// over `body`: one entry set per basic block, initialized according
    /// to `D::BOTTOM_VALUE`, plus one (initially no-op) transfer function
    /// per block.
    pub fn new(
        body: &'a Body<'tcx>,
        dead_unwinds: &'a BitSet<mir::BasicBlock>,
        denotation: D,
    ) -> Self {
        let bits_per_block = denotation.bits_per_block();
        let num_blocks = body.basic_blocks().len();

        // Every block's entry set starts at the lattice bottom:
        // all-ones when bottom is `true`, all-zeroes otherwise.
        let mk_bottom = || {
            if D::BOTTOM_VALUE {
                BitSet::new_filled(bits_per_block)
            } else {
                BitSet::new_empty(bits_per_block)
            }
        };
        let on_entry: Vec<_> = (0..num_blocks).map(|_| mk_bottom()).collect();

        // Transfer functions start out as no-ops (empty gen/kill sets).
        let nop = GenKill::from_elem(HybridBitSet::new_empty(bits_per_block));
        let trans = vec![nop; num_blocks];

        DataflowAnalysis {
            body,
            dead_unwinds,
            flow_state: DataflowState {
                sets: AllSets { bits_per_block, on_entry, trans },
                operator: denotation,
            },
        }
    }
}
impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D>
where
    D: BitDenotation<'tcx>,
{
    /// Propagates the bits of `in_out` into all the successors of `bb`,
    /// using bitwise operator denoted by `self.operator`.
    ///
    /// For most blocks, this is entirely uniform. However, for blocks
    /// that end with a call terminator, the effect of the call on the
    /// dataflow state may depend on whether the call returned
    /// successfully or unwound.
    ///
    /// To reflect this, the `propagate_call_return` method of the
    /// `BitDenotation` mutates `in_out` when propagating `in_out` via
    /// a call terminator; such mutation is performed *last*, to
    /// ensure its side-effects do not leak elsewhere (e.g., into
    /// unwind target).
    fn propagate_bits_into_graph_successors_of(
        &mut self,
        in_out: &mut BitSet<D::Idx>,
        (bb, bb_data): (mir::BasicBlock, &mir::BasicBlockData<'tcx>),
        dirty_list: &mut WorkQueue<mir::BasicBlock>,
    ) {
        match bb_data.terminator().kind {
            // Terminators with no successors: nothing to propagate.
            mir::TerminatorKind::Return
            | mir::TerminatorKind::Resume
            | mir::TerminatorKind::Abort
            | mir::TerminatorKind::GeneratorDrop
            | mir::TerminatorKind::Unreachable => {}

            // Terminators with exactly one live successor.
            mir::TerminatorKind::Goto { target }
            | mir::TerminatorKind::Assert { target, cleanup: None, .. }
            | mir::TerminatorKind::Yield { resume: target, drop: None, .. }
            | mir::TerminatorKind::Drop { target, location: _, unwind: None }
            | mir::TerminatorKind::DropAndReplace { target, value: _, location: _, unwind: None } =>
            {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
            }

            // A yield with a drop path: both edges receive the same bits.
            mir::TerminatorKind::Yield { resume: target, drop: Some(drop), .. } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
            }

            // Terminators with a normal successor plus an unwind edge.
            mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. }
            | mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) }
            | mir::TerminatorKind::DropAndReplace {
                target,
                value: _,
                location: _,
                unwind: Some(unwind),
            } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                // Skip the unwind edge entirely if this block is known
                // never to unwind.
                if !self.dead_unwinds.contains(bb) {
                    self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                }
            }

            // Multi-way branch: every target gets the same bits.
            mir::TerminatorKind::SwitchInt { ref targets, .. } => {
                for target in targets {
                    self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
                }
            }

            // Calls: the unwind edge (if live) is propagated first; the
            // success edge additionally gets the flow-dependent effect
            // of storing the call's destination.
            mir::TerminatorKind::Call { cleanup, ref destination, .. } => {
                if let Some(unwind) = cleanup {
                    if !self.dead_unwinds.contains(bb) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                    }
                }
                if let Some((ref dest_place, dest_bb)) = *destination {
                    // N.B.: This must be done *last*, after all other
                    // propagation, as documented in comment above.
                    self.flow_state.operator.propagate_call_return(in_out, bb, dest_bb, dest_place);
                    self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
                }
            }

            // Borrow-check bookkeeping edges: the imaginary target is
            // propagated to just like the real one.
            mir::TerminatorKind::FalseEdges { real_target, imaginary_target } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
            }
            mir::TerminatorKind::FalseUnwind { real_target, unwind } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                if let Some(unwind) = unwind {
                    if !self.dead_unwinds.contains(bb) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                    }
                }
            }
        }
    }

    /// Joins `in_out` into the entry set of `bb` (via the operator's
    /// `BottomValue::join`). If the entry set changed as a result, `bb`
    /// is enqueued on `dirty_queue` for (re)processing.
    fn propagate_bits_into_entry_set_for(
        &mut self,
        in_out: &BitSet<D::Idx>,
        bb: mir::BasicBlock,
        dirty_queue: &mut WorkQueue<mir::BasicBlock>,
    ) {
        let entry_set = self.flow_state.sets.entry_set_mut_for(bb.index());
        let set_changed = self.flow_state.operator.join(entry_set, &in_out);
        if set_changed {
            dirty_queue.insert(bb);
        }
    }
}

View file

@ -9,7 +9,7 @@ use rustc_index::bit_set::BitSet;
use std::marker::PhantomData;
use super::{qualifs, Item, Qualif};
use crate::dataflow::{self as old_dataflow, generic as dataflow};
use crate::dataflow;
/// A `Visitor` that propagates qualifs between locals. This defines the transfer function of
/// `FlowSensitiveAnalysis`.
@ -165,7 +165,7 @@ where
}
}
impl<Q> old_dataflow::BottomValue for FlowSensitiveAnalysis<'_, '_, '_, Q> {
impl<Q> dataflow::BottomValue for FlowSensitiveAnalysis<'_, '_, '_, Q> {
const BOTTOM_VALUE: bool = false;
}

View file

@ -22,8 +22,8 @@ use super::qualifs::{self, HasMutInterior, NeedsDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{is_lang_panic_fn, ConstKind, Item, Qualif};
use crate::const_eval::{is_const_fn, is_unstable_const_fn};
use crate::dataflow::generic::{self as dataflow, Analysis};
use crate::dataflow::MaybeMutBorrowedLocals;
use crate::dataflow::{self, Analysis};
// We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated
// through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`

View file

@ -1,9 +1,9 @@
use crate::dataflow;
use crate::dataflow::generic::{Analysis, ResultsCursor};
use crate::dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
use crate::dataflow::on_lookup_result_bits;
use crate::dataflow::MoveDataParamEnv;
use crate::dataflow::{on_all_children_bits, on_all_drop_children_bits};
use crate::dataflow::{Analysis, ResultsCursor};
use crate::dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use crate::transform::{MirPass, MirSource};
use crate::util::elaborate_drops::{elaborate_drop, DropFlagState, Unwind};

View file

@ -49,7 +49,7 @@
//! For generators with state 1 (returned) and state 2 (poisoned) it does nothing.
//! Otherwise it drops all the values in scope at the last suspension point.
use crate::dataflow::generic::{self as dataflow, Analysis};
use crate::dataflow::{self, Analysis};
use crate::dataflow::{MaybeBorrowedLocals, MaybeRequiresStorage, MaybeStorageLive};
use crate::transform::no_landing_pads::no_landing_pads;
use crate::transform::simplify;

View file

@ -9,11 +9,11 @@ use rustc::ty::{self, Ty, TyCtxt};
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use crate::dataflow::generic::{Analysis, Results, ResultsCursor};
use crate::dataflow::move_paths::{HasMoveData, MoveData};
use crate::dataflow::move_paths::{LookupResult, MovePathIndex};
use crate::dataflow::MaybeMutBorrowedLocals;
use crate::dataflow::MoveDataParamEnv;
use crate::dataflow::{Analysis, Results, ResultsCursor};
use crate::dataflow::{
DefinitelyInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces,
};