Auto merge of #64736 - Nashenas88:mir_predecessors_cache_cleanup, r=oli-obk

Remove interior mutability in mir predecessors cache
This commit is contained in:
bors 2019-12-02 14:51:02 +00:00
commit fdc0011561
74 changed files with 944 additions and 592 deletions

View file

@ -23,17 +23,17 @@ macro_rules! arena_types {
[] generics: rustc::ty::Generics,
[] trait_def: rustc::ty::TraitDef,
[] adt_def: rustc::ty::AdtDef,
[] steal_mir: rustc::ty::steal::Steal<rustc::mir::Body<$tcx>>,
[] mir: rustc::mir::Body<$tcx>,
[] steal_mir: rustc::ty::steal::Steal<rustc::mir::BodyCache<$tcx>>,
[] mir: rustc::mir::BodyCache<$tcx>,
[] steal_promoted: rustc::ty::steal::Steal<
rustc_index::vec::IndexVec<
rustc::mir::Promoted,
rustc::mir::Body<$tcx>
rustc::mir::BodyCache<$tcx>
>
>,
[] promoted: rustc_index::vec::IndexVec<
rustc::mir::Promoted,
rustc::mir::Body<$tcx>
rustc::mir::BodyCache<$tcx>
>,
[] tables: rustc::ty::TypeckTables<$tcx>,
[] const_allocs: rustc::mir::interpret::Allocation,

View file

@ -1,16 +1,19 @@
use rustc_index::vec::IndexVec;
use rustc_data_structures::sync::{RwLock, MappedReadGuard, ReadGuard};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
use crate::ich::StableHashingContext;
use crate::mir::{Body, BasicBlock};
use crate::mir::{BasicBlock, BasicBlockData, Body, LocalDecls, Location, Successors};
use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors};
use rustc_data_structures::graph::dominators::{dominators, Dominators};
use std::iter;
use std::ops::{Deref, DerefMut, Index, IndexMut};
use std::vec::IntoIter;
// Per-body cache of derived CFG data; currently only the predecessor map
// (for each basic block, the blocks whose terminators jump to it).
#[derive(Clone, Debug)]
pub struct Cache {
// FIXME(review): diff residue — both the old (RwLock-wrapped, interior-mutable)
// and the new field declaration are present. The `RwLock` line below is the
// pre-change version and should be deleted; as written the duplicate
// `predecessors` field will not compile.
predecessors: RwLock<Option<IndexVec<BasicBlock, Vec<BasicBlock>>>>
predecessors: Option<IndexVec<BasicBlock, Vec<BasicBlock>>>,
}
impl rustc_serialize::Encodable for Cache {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
Encodable::encode(&(), s)
@ -31,39 +34,264 @@ impl<'a> HashStable<StableHashingContext<'a>> for Cache {
impl Cache {
pub fn new() -> Self {
Cache {
predecessors: RwLock::new(None)
Self {
predecessors: None,
}
}
pub fn invalidate(&self) {
pub fn invalidate_predecessors(&mut self) {
// FIXME: consider being more fine-grained
*self.predecessors.borrow_mut() = None;
self.predecessors = None;
}
pub fn predecessors(
&self,
body: &Body<'_>
) -> MappedReadGuard<'_, IndexVec<BasicBlock, Vec<BasicBlock>>> {
if self.predecessors.borrow().is_none() {
*self.predecessors.borrow_mut() = Some(calculate_predecessors(body));
}
pub fn ensure_predecessors(&mut self, body: &Body<'_>) {
if self.predecessors.is_none() {
let mut result = IndexVec::from_elem(vec![], body.basic_blocks());
for (bb, data) in body.basic_blocks().iter_enumerated() {
if let Some(ref term) = data.terminator {
for &tgt in term.successors() {
result[tgt].push(bb);
}
}
}
ReadGuard::map(self.predecessors.borrow(), |p| p.as_ref().unwrap())
self.predecessors = Some(result)
}
}
/// This will recompute the predecessors cache if it is not available
fn predecessors(&mut self, body: &Body<'_>) -> &IndexVec<BasicBlock, Vec<BasicBlock>> {
self.ensure_predecessors(body);
self.predecessors.as_ref().unwrap()
}
fn unwrap_predecessors_for(&self, bb: BasicBlock) -> &[BasicBlock] {
&self.predecessors.as_ref().unwrap()[bb]
}
fn unwrap_predecessor_locations<'a>(
&'a self,
loc: Location,
body: &'a Body<'a>
) -> impl Iterator<Item = Location> + 'a {
let if_zero_locations = if loc.statement_index == 0 {
let predecessor_blocks = self.unwrap_predecessors_for(loc.block);
let num_predecessor_blocks = predecessor_blocks.len();
Some(
(0..num_predecessor_blocks)
.map(move |i| predecessor_blocks[i])
.map(move |bb| body.terminator_loc(bb)),
)
} else {
None
};
let if_not_zero_locations = if loc.statement_index == 0 {
None
} else {
Some(Location { block: loc.block, statement_index: loc.statement_index - 1 })
};
if_zero_locations.into_iter().flatten().chain(if_not_zero_locations)
}
pub fn basic_blocks_mut<'a, 'tcx>(
&mut self,
body: &'a mut Body<'tcx>
) -> &'a mut IndexVec<BasicBlock, BasicBlockData<'tcx>> {
debug!("bbm: Clearing predecessors cache for body at: {:?}", body.span.data());
self.invalidate_predecessors();
&mut body.basic_blocks
}
pub fn basic_blocks_and_local_decls_mut<'a, 'tcx>(
&mut self,
body: &'a mut Body<'tcx>
) -> (&'a mut IndexVec<BasicBlock, BasicBlockData<'tcx>>, &'a mut LocalDecls<'tcx>) {
debug!("bbaldm: Clearing predecessors cache for body at: {:?}", body.span.data());
self.invalidate_predecessors();
(&mut body.basic_blocks, &mut body.local_decls)
}
}
fn calculate_predecessors(body: &Body<'_>) -> IndexVec<BasicBlock, Vec<BasicBlock>> {
let mut result = IndexVec::from_elem(vec![], body.basic_blocks());
for (bb, data) in body.basic_blocks().iter_enumerated() {
if let Some(ref term) = data.terminator {
for &tgt in term.successors() {
result[tgt].push(bb);
}
// A MIR `Body` bundled with an owned (no longer interior-mutable) `Cache`.
// Mutable accessors on this type route through the cache so the predecessor
// map is invalidated on mutation; read-only consumers go through
// `ReadOnlyBodyCache`, which requires predecessors to be computed up front.
#[derive(Clone, Debug, HashStable, RustcEncodable, RustcDecodable, TypeFoldable)]
pub struct BodyCache<'tcx> {
cache: Cache,
body: Body<'tcx>,
}
// NOTE: `'tcx` is not declared on the impl — this relies on the (nightly)
// in-band lifetimes feature used by rustc at the time.
impl BodyCache<'tcx> {
/// Wraps `body` with a fresh, empty cache; predecessors are not yet
/// computed (call `ensure_predecessors` before taking a read-only view).
pub fn new(body: Body<'tcx>) -> Self {
Self {
cache: Cache::new(),
body,
}
}
}
// Convenience macro: populate the predecessor cache, then hand back a
// `ReadOnlyBodyCache` view. `unwrap_read_only` asserts that predecessors
// exist, so computing them first keeps that assertion from firing.
#[macro_export]
macro_rules! read_only {
($body:expr) => {
{
$body.ensure_predecessors();
$body.unwrap_read_only()
}
};
}
impl BodyCache<'tcx> {
/// Computes the predecessor map if it has not been computed yet.
pub fn ensure_predecessors(&mut self) {
self.cache.ensure_predecessors(&self.body);
}
/// Returns the predecessor map, computing it first if necessary — hence
/// `&mut self` even though the result is only read.
pub fn predecessors(&mut self) -> &IndexVec<BasicBlock, Vec<BasicBlock>> {
self.cache.predecessors(&self.body)
}
/// Borrowed read-only view of body + cache. `ReadOnlyBodyCache::new`
/// asserts that predecessors have been computed, so call
/// `ensure_predecessors` first (or use the `read_only!` macro).
pub fn unwrap_read_only(&self) -> ReadOnlyBodyCache<'_, 'tcx> {
ReadOnlyBodyCache::new(&self.cache, &self.body)
}
/// Mutable access to the basic blocks; invalidates the predecessor cache.
pub fn basic_blocks_mut(&mut self) -> &mut IndexVec<BasicBlock, BasicBlockData<'tcx>> {
self.cache.basic_blocks_mut(&mut self.body)
}
/// Mutable access to basic blocks and local declarations together;
/// invalidates the predecessor cache.
pub fn basic_blocks_and_local_decls_mut(
&mut self
) -> (&mut IndexVec<BasicBlock, BasicBlockData<'tcx>>, &mut LocalDecls<'tcx>) {
self.cache.basic_blocks_and_local_decls_mut(&mut self.body)
}
}
// Read-only indexing goes straight to the underlying body (no cache effect).
impl<'tcx> Index<BasicBlock> for BodyCache<'tcx> {
type Output = BasicBlockData<'tcx>;
fn index(&self, index: BasicBlock) -> &BasicBlockData<'tcx> {
&self.body[index]
}
}
// Mutable indexing routes through `basic_blocks_mut`, so it invalidates the
// predecessor cache before handing out the mutable reference.
impl<'tcx> IndexMut<BasicBlock> for BodyCache<'tcx> {
fn index_mut(&mut self, index: BasicBlock) -> &mut Self::Output {
&mut self.basic_blocks_mut()[index]
}
}
// Deref to the wrapped `Body` so read-only `Body` APIs work unchanged.
impl<'tcx> Deref for BodyCache<'tcx> {
type Target = Body<'tcx>;
fn deref(&self) -> &Self::Target {
&self.body
}
}
// NOTE(review): unlike `index_mut`, `deref_mut` hands out `&mut Body`
// WITHOUT invalidating the predecessor cache, so a caller could mutate
// `basic_blocks` behind the cache's back. Presumably intended only for
// fields other than the CFG — verify.
impl<'tcx> DerefMut for BodyCache<'tcx> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.body
}
}
// Borrowed, read-only view of a `Body` together with a cache whose
// predecessor map is guaranteed (by the constructor's assertion) to be
// computed. `Copy` because it is just a pair of shared references.
#[derive(Copy, Clone, Debug)]
pub struct ReadOnlyBodyCache<'a, 'tcx> {
cache: &'a Cache,
body: &'a Body<'tcx>,
}
impl ReadOnlyBodyCache<'a, 'tcx> {
/// Private constructor; asserts that the predecessor map was computed
/// (`BodyCache::ensure_predecessors`) before the view is created, which
/// is what lets the accessors below unwrap without checking.
fn new(cache: &'a Cache, body: &'a Body<'tcx>) -> Self {
assert!(
cache.predecessors.is_some(),
"Cannot construct ReadOnlyBodyCache without computed predecessors");
Self {
cache,
body,
}
}
// FIXME(review): stray `result` token below — diff residue from the deleted
// `calculate_predecessors` free function. It does not parse here and should
// be removed.
result
/// The full predecessor map. `unwrap` is safe: `new` asserted `is_some`.
pub fn predecessors(&self) -> &IndexVec<BasicBlock, Vec<BasicBlock>> {
self.cache.predecessors.as_ref().unwrap()
}
/// Predecessor blocks of `bb`.
pub fn predecessors_for(&self, bb: BasicBlock) -> &[BasicBlock] {
self.cache.unwrap_predecessors_for(bb)
}
/// Locations flowing directly into `loc`: the previous statement in the
/// same block, or — when `loc` is a block head — the terminator locations
/// of its predecessor blocks.
pub fn predecessor_locations(&self, loc: Location) -> impl Iterator<Item = Location> + '_ {
self.cache.unwrap_predecessor_locations(loc, self.body)
}
/// The underlying body, with the full `'a` borrow lifetime (not tied to
/// `&self`).
pub fn body(&self) -> &'a Body<'tcx> {
self.body
}
/// Direct access to the body's basic blocks.
pub fn basic_blocks(&self) -> &IndexVec<BasicBlock, BasicBlockData<'tcx>> {
&self.body.basic_blocks
}
/// Dominator tree of the CFG, computed on demand (not cached).
pub fn dominators(&self) -> Dominators<BasicBlock> {
dominators(self)
}
}
// Graph-trait implementations: these let generic CFG algorithms (e.g. the
// dominator computation above) run directly over a `ReadOnlyBodyCache`.
// Predecessor queries use the cached map; everything else delegates to the
// body's own graph impls.
impl graph::DirectedGraph for ReadOnlyBodyCache<'a, 'tcx> {
type Node = BasicBlock;
}
impl graph::GraphPredecessors<'graph> for ReadOnlyBodyCache<'a, 'tcx> {
type Item = BasicBlock;
type Iter = IntoIter<BasicBlock>;
}
impl graph::WithPredecessors for ReadOnlyBodyCache<'a, 'tcx> {
fn predecessors(
&self,
node: Self::Node,
) -> <Self as GraphPredecessors<'_>>::Iter {
// Cloned into an owned Vec because the declared iterator type
// (`vec::IntoIter`) owns its items.
self.cache.unwrap_predecessors_for(node).to_vec().into_iter()
}
}
impl graph::WithNumNodes for ReadOnlyBodyCache<'a, 'tcx> {
fn num_nodes(&self) -> usize {
self.body.num_nodes()
}
}
impl graph::WithStartNode for ReadOnlyBodyCache<'a, 'tcx> {
fn start_node(&self) -> Self::Node {
self.body.start_node()
}
}
impl graph::WithSuccessors for ReadOnlyBodyCache<'a, 'tcx> {
fn successors(
&self,
node: Self::Node,
) -> <Self as GraphSuccessors<'_>>::Iter {
self.body.successors(node)
}
}
impl<'a, 'b, 'tcx> graph::GraphSuccessors<'b> for ReadOnlyBodyCache<'a, 'tcx> {
type Item = BasicBlock;
type Iter = iter::Cloned<Successors<'b>>;
}
// Deref to the underlying `Body` so read-only `Body` APIs are usable on the
// view without an explicit `.body()` call.
impl Deref for ReadOnlyBodyCache<'a, 'tcx> {
type Target = Body<'tcx>;
fn deref(&self) -> &Self::Target {
self.body
}
}
// Shared indexing by basic block delegates to the body; no mutable indexing
// exists on this type, so the cache can never be invalidated through it.
impl Index<BasicBlock> for ReadOnlyBodyCache<'a, 'tcx> {
type Output = BasicBlockData<'tcx>;
fn index(&self, index: BasicBlock) -> &BasicBlockData<'tcx> {
&self.body[index]
}
}
CloneTypeFoldableAndLiftImpls! {

View file

@ -21,25 +21,25 @@ use crate::ty::{
use polonius_engine::Atom;
use rustc_index::bit_set::BitMatrix;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::graph::dominators::{dominators, Dominators};
use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::graph::{self, GraphSuccessors};
use rustc_index::vec::{Idx, IndexVec};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::sync::MappedReadGuard;
use rustc_macros::HashStable;
use rustc_serialize::{Encodable, Decodable};
use smallvec::SmallVec;
use std::borrow::Cow;
use std::fmt::{self, Debug, Display, Formatter, Write};
use std::ops::{Index, IndexMut};
use std::ops::Index;
use std::slice;
use std::vec::IntoIter;
use std::{iter, mem, option, u32};
use syntax::ast::Name;
use syntax::symbol::Symbol;
use syntax_pos::{Span, DUMMY_SP};
pub use crate::mir::interpret::AssertMessage;
pub use crate::mir::cache::{BodyCache, ReadOnlyBodyCache};
pub use crate::read_only;
mod cache;
pub mod interpret;
@ -108,7 +108,7 @@ pub struct Body<'tcx> {
pub yield_ty: Option<Ty<'tcx>>,
/// Generator drop glue.
pub generator_drop: Option<Box<Body<'tcx>>>,
pub generator_drop: Option<Box<BodyCache<'tcx>>>,
/// The layout of a generator. Produced by the state transformation.
pub generator_layout: Option<GeneratorLayout<'tcx>>,
@ -154,9 +154,6 @@ pub struct Body<'tcx> {
/// A span representing this MIR, for error reporting.
pub span: Span,
/// A cache for various calculations.
cache: cache::Cache,
}
impl<'tcx> Body<'tcx> {
@ -193,7 +190,6 @@ impl<'tcx> Body<'tcx> {
spread_arg: None,
var_debug_info,
span,
cache: cache::Cache::new(),
control_flow_destroyed,
}
}
@ -203,58 +199,6 @@ impl<'tcx> Body<'tcx> {
&self.basic_blocks
}
#[inline]
pub fn basic_blocks_mut(&mut self) -> &mut IndexVec<BasicBlock, BasicBlockData<'tcx>> {
self.cache.invalidate();
&mut self.basic_blocks
}
#[inline]
pub fn basic_blocks_and_local_decls_mut(
&mut self,
) -> (&mut IndexVec<BasicBlock, BasicBlockData<'tcx>>, &mut LocalDecls<'tcx>) {
self.cache.invalidate();
(&mut self.basic_blocks, &mut self.local_decls)
}
#[inline]
pub fn predecessors(&self) -> MappedReadGuard<'_, IndexVec<BasicBlock, Vec<BasicBlock>>> {
self.cache.predecessors(self)
}
#[inline]
pub fn predecessors_for(&self, bb: BasicBlock) -> MappedReadGuard<'_, Vec<BasicBlock>> {
MappedReadGuard::map(self.predecessors(), |p| &p[bb])
}
#[inline]
pub fn predecessor_locations(&self, loc: Location) -> impl Iterator<Item = Location> + '_ {
let if_zero_locations = if loc.statement_index == 0 {
let predecessor_blocks = self.predecessors_for(loc.block);
let num_predecessor_blocks = predecessor_blocks.len();
Some(
(0..num_predecessor_blocks)
.map(move |i| predecessor_blocks[i])
.map(move |bb| self.terminator_loc(bb)),
)
} else {
None
};
let if_not_zero_locations = if loc.statement_index == 0 {
None
} else {
Some(Location { block: loc.block, statement_index: loc.statement_index - 1 })
};
if_zero_locations.into_iter().flatten().chain(if_not_zero_locations)
}
#[inline]
pub fn dominators(&self) -> Dominators<BasicBlock> {
dominators(self)
}
/// Returns `true` if a cycle exists in the control-flow graph that is reachable from the
/// `START_BLOCK`.
pub fn is_cfg_cyclic(&self) -> bool {
@ -355,7 +299,7 @@ impl<'tcx> Body<'tcx> {
/// Changes a statement to a nop. This is both faster than deleting instructions and avoids
/// invalidating statement indices in `Location`s.
pub fn make_statement_nop(&mut self, location: Location) {
let block = &mut self[location.block];
let block = &mut self.basic_blocks[location.block];
debug_assert!(location.statement_index < block.statements.len());
block.statements[location.statement_index].make_nop()
}
@ -415,13 +359,6 @@ impl<'tcx> Index<BasicBlock> for Body<'tcx> {
}
}
impl<'tcx> IndexMut<BasicBlock> for Body<'tcx> {
#[inline]
fn index_mut(&mut self, index: BasicBlock) -> &mut BasicBlockData<'tcx> {
&mut self.basic_blocks_mut()[index]
}
}
#[derive(Copy, Clone, Debug, HashStable, TypeFoldable)]
pub enum ClearCrossCrate<T> {
Clear,
@ -2618,15 +2555,6 @@ impl<'tcx> graph::WithStartNode for Body<'tcx> {
}
}
impl<'tcx> graph::WithPredecessors for Body<'tcx> {
fn predecessors(
&self,
node: Self::Node,
) -> <Self as GraphPredecessors<'_>>::Iter {
self.predecessors_for(node).clone().into_iter()
}
}
impl<'tcx> graph::WithSuccessors for Body<'tcx> {
fn successors(
&self,
@ -2636,11 +2564,6 @@ impl<'tcx> graph::WithSuccessors for Body<'tcx> {
}
}
impl<'a, 'b> graph::GraphPredecessors<'b> for Body<'a> {
type Item = BasicBlock;
type Iter = IntoIter<BasicBlock>;
}
impl<'a, 'b> graph::GraphSuccessors<'b> for Body<'a> {
type Item = BasicBlock;
type Iter = iter::Cloned<Successors<'b>>;
@ -2675,7 +2598,11 @@ impl Location {
}
/// Returns `true` if `other` is earlier in the control flow graph than `self`.
pub fn is_predecessor_of<'tcx>(&self, other: Location, body: &Body<'tcx>) -> bool {
pub fn is_predecessor_of<'tcx>(
&self,
other: Location,
body: ReadOnlyBodyCache<'_, 'tcx>
) -> bool {
// If we are in the same block as the other location and are an earlier statement
// then we are a predecessor of `other`.
if self.block == other.block && self.statement_index < other.statement_index {
@ -2683,13 +2610,13 @@ impl Location {
}
// If we're in another block, then we want to check that block is a predecessor of `other`.
let mut queue: Vec<BasicBlock> = body.predecessors_for(other.block).clone();
let mut queue: Vec<BasicBlock> = body.predecessors_for(other.block).to_vec();
let mut visited = FxHashSet::default();
while let Some(block) = queue.pop() {
// If we haven't visited this block before, then make sure we visit its predecessors.
if visited.insert(block) {
queue.append(&mut body.predecessors_for(block).clone());
queue.extend(body.predecessors_for(block).iter().cloned());
} else {
continue;
}

View file

@ -65,13 +65,25 @@ use syntax_pos::Span;
// variant argument) that does not require visiting, as in
// `is_cleanup` above.
macro_rules! body_cache_type {
(mut $a:lifetime, $tcx:lifetime) => {
&mut BodyCache<$tcx>
};
($a:lifetime, $tcx:lifetime) => {
ReadOnlyBodyCache<$a, $tcx>
};
}
macro_rules! make_mir_visitor {
($visitor_trait_name:ident, $($mutability:ident)?) => {
pub trait $visitor_trait_name<'tcx> {
// Override these, and call `self.super_xxx` to revert back to the
// default behavior.
fn visit_body(&mut self, body: & $($mutability)? Body<'tcx>) {
fn visit_body(
&mut self,
body: body_cache_type!($($mutability)? '_, 'tcx)
) {
self.super_body(body);
}
@ -240,11 +252,14 @@ macro_rules! make_mir_visitor {
// The `super_xxx` methods comprise the default behavior and are
// not meant to be overridden.
fn super_body(&mut self,
body: & $($mutability)? Body<'tcx>) {
fn super_body(
&mut self,
$($mutability)? body: body_cache_type!($($mutability)? '_, 'tcx)
) {
let span = body.span;
if let Some(yield_ty) = &$($mutability)? body.yield_ty {
self.visit_ty(yield_ty, TyContext::YieldTy(SourceInfo {
span: body.span,
span,
scope: OUTERMOST_SOURCE_SCOPE,
}));
}
@ -260,6 +275,7 @@ macro_rules! make_mir_visitor {
self.visit_basic_block_data(bb, data);
}
let body: & $($mutability)? Body<'_> = & $($mutability)? body;
for scope in &$($mutability)? body.source_scopes {
self.visit_source_scope_data(scope);
}
@ -790,7 +806,11 @@ macro_rules! make_mir_visitor {
// Convenience methods
fn visit_location(&mut self, body: & $($mutability)? Body<'tcx>, location: Location) {
fn visit_location(
&mut self,
body: body_cache_type!($($mutability)? '_, 'tcx),
location: Location
) {
let basic_block = & $($mutability)? body[location.block];
if basic_block.statements.len() == location.statement_index {
if let Some(ref $($mutability)? terminator) = basic_block.terminator {

View file

@ -106,44 +106,54 @@ rustc_queries! {
/// Fetch the MIR for a given `DefId` right after it's built - this includes
/// unreachable code.
query mir_built(_: DefId) -> &'tcx Steal<mir::Body<'tcx>> {}
query mir_built(_: DefId) -> &'tcx Steal<mir::BodyCache<'tcx>> {}
/// Fetch the MIR for a given `DefId` up till the point where it is
/// ready for const evaluation.
///
/// See the README for the `mir` module for details.
query mir_const(_: DefId) -> &'tcx Steal<mir::Body<'tcx>> {
query mir_const(_: DefId) -> &'tcx Steal<mir::BodyCache<'tcx>> {
no_hash
}
query mir_validated(_: DefId) ->
(
&'tcx Steal<mir::Body<'tcx>>,
&'tcx Steal<IndexVec<mir::Promoted, mir::Body<'tcx>>>
&'tcx Steal<mir::BodyCache<'tcx>>,
&'tcx Steal<IndexVec<mir::Promoted, mir::BodyCache<'tcx>>>
) {
no_hash
}
/// MIR after our optimization passes have run. This is MIR that is ready
/// for codegen. This is also the only query that can fetch non-local MIR, at present.
query optimized_mir(key: DefId) -> &'tcx mir::Body<'tcx> {
query optimized_mir(key: DefId) -> &'tcx mir::BodyCache<'tcx> {
cache_on_disk_if { key.is_local() }
load_cached(tcx, id) {
let mir: Option<crate::mir::Body<'tcx>> = tcx.queries.on_disk_cache
.try_load_query_result(tcx, id);
mir.map(|x| &*tcx.arena.alloc(x))
let mir: Option<crate::mir::BodyCache<'tcx>>
= tcx.queries.on_disk_cache.try_load_query_result(tcx, id);
mir.map(|x| {
let cache = tcx.arena.alloc(x);
cache.ensure_predecessors();
&*cache
})
}
}
query promoted_mir(key: DefId) -> &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>> {
query promoted_mir(key: DefId) -> &'tcx IndexVec<mir::Promoted, mir::BodyCache<'tcx>> {
cache_on_disk_if { key.is_local() }
load_cached(tcx, id) {
let promoted: Option<
rustc_index::vec::IndexVec<
crate::mir::Promoted,
crate::mir::Body<'tcx>
crate::mir::BodyCache<'tcx>
>> = tcx.queries.on_disk_cache.try_load_query_result(tcx, id);
promoted.map(|p| &*tcx.arena.alloc(p))
promoted.map(|p| {
let cache = tcx.arena.alloc(p);
for body in cache.iter_mut() {
body.ensure_predecessors();
}
&*cache
})
}
}
}
@ -502,7 +512,7 @@ rustc_queries! {
/// in the case of closures, this will be redirected to the enclosing function.
query region_scope_tree(_: DefId) -> &'tcx region::ScopeTree {}
query mir_shims(key: ty::InstanceDef<'tcx>) -> &'tcx mir::Body<'tcx> {
query mir_shims(key: ty::InstanceDef<'tcx>) -> &'tcx mir::BodyCache<'tcx> {
no_force
desc { |tcx| "generating MIR shim for `{}`", tcx.def_path_str(key.def_id()) }
}

View file

@ -22,7 +22,7 @@ use crate::middle::cstore::EncodedMetadata;
use crate::middle::lang_items;
use crate::middle::resolve_lifetime::{self, ObjectLifetimeDefault};
use crate::middle::stability;
use crate::mir::{Body, Field, interpret, Local, Place, PlaceElem, ProjectionKind, Promoted};
use crate::mir::{BodyCache, Field, interpret, Local, Place, PlaceElem, ProjectionKind, Promoted};
use crate::mir::interpret::{ConstValue, Allocation, Scalar};
use crate::ty::subst::{GenericArg, InternalSubsts, SubstsRef, Subst};
use crate::ty::ReprOptions;
@ -1083,17 +1083,17 @@ impl<'tcx> TyCtxt<'tcx> {
&self.hir_map
}
pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal<Body<'tcx>> {
pub fn alloc_steal_mir(self, mir: BodyCache<'tcx>) -> &'tcx Steal<BodyCache<'tcx>> {
self.arena.alloc(Steal::new(mir))
}
pub fn alloc_steal_promoted(self, promoted: IndexVec<Promoted, Body<'tcx>>) ->
&'tcx Steal<IndexVec<Promoted, Body<'tcx>>> {
pub fn alloc_steal_promoted(self, promoted: IndexVec<Promoted, BodyCache<'tcx>>) ->
&'tcx Steal<IndexVec<Promoted, BodyCache<'tcx>>> {
self.arena.alloc(Steal::new(promoted))
}
pub fn intern_promoted(self, promoted: IndexVec<Promoted, Body<'tcx>>) ->
&'tcx IndexVec<Promoted, Body<'tcx>> {
pub fn intern_promoted(self, promoted: IndexVec<Promoted, BodyCache<'tcx>>) ->
&'tcx IndexVec<Promoted, BodyCache<'tcx>> {
self.arena.alloc(promoted)
}

View file

@ -18,7 +18,7 @@ use crate::infer::canonical::Canonical;
use crate::middle::cstore::CrateStoreDyn;
use crate::middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use crate::middle::resolve_lifetime::ObjectLifetimeDefault;
use crate::mir::Body;
use crate::mir::ReadOnlyBodyCache;
use crate::mir::interpret::{GlobalId, ErrorHandled};
use crate::mir::GeneratorLayout;
use crate::session::CrateDisambiguator;
@ -2985,10 +2985,10 @@ impl<'tcx> TyCtxt<'tcx> {
}
/// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair.
pub fn instance_mir(self, instance: ty::InstanceDef<'tcx>) -> &'tcx Body<'tcx> {
pub fn instance_mir(self, instance: ty::InstanceDef<'tcx>) -> ReadOnlyBodyCache<'tcx, 'tcx> {
match instance {
ty::InstanceDef::Item(did) => {
self.optimized_mir(did)
self.optimized_mir(did).unwrap_read_only()
}
ty::InstanceDef::VtableShim(..) |
ty::InstanceDef::ReifyShim(..) |
@ -2998,7 +2998,7 @@ impl<'tcx> TyCtxt<'tcx> {
ty::InstanceDef::ClosureOnceShim { .. } |
ty::InstanceDef::DropGlue(..) |
ty::InstanceDef::CloneShim(..) => {
self.mir_shims(instance)
self.mir_shims(instance).unwrap_read_only()
}
}
}