make HR algorithms account for region subtyping

Currently, we consider region subtyping a failure
if a skolemized lifetime is relatable to any
other lifetime in any way at all. But a more precise
formulation is to say that a skolemized lifetime:

- must not have any *incoming* edges in the region graph
- only has *outgoing* edges to nodes that are `'static`

To enforce the latter requirement, we add edges from `'static -> 'x` for
each lifetime `'x` reachable from a skolemized region.

We now have to add a new `pop_skolemized` routine to do cleanup.
Whereas before if there were *any* edges relating to a skolemized
region, we would return `Err` and hence rollback the transaction, we now
tolerate some edges and return `Ok`. Therefore, the `pop_skolemized`
routine runs and cleans up those edges.
This commit is contained in:
Niko Matsakis 2016-04-20 19:51:56 -04:00
parent aecce2ba6e
commit 11984340bf
11 changed files with 639 additions and 168 deletions

View file

@ -1648,4 +1648,5 @@ register_diagnostics! {
E0491, // in type `..`, reference has a longer lifetime than the data it...
E0495, // cannot infer an appropriate lifetime due to conflicting requirements
E0525, // expected a closure that implements `..` but this closure only implements `..`
E0526, // skolemization subtype
}

View file

@ -918,6 +918,17 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
"");
err
}
infer::SkolemizeSuccessor(span) => {
let mut err =
struct_span_err!(self.tcx.sess, span, E0526,
"to satisfy higher-ranked bounds, \
a static lifetime is required");
self.tcx.note_and_explain_region(&mut err,
"but the lifetime is only valid for ",
sub,
"");
err
}
}
}
@ -1784,6 +1795,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
"...so that references are valid when the destructor \
runs");
}
infer::SkolemizeSuccessor(span) => {
err.span_note(
span,
"...so that higher-ranked bounds are satisfied");
}
}
}
}

View file

@ -11,9 +11,16 @@
//! Helper routines for higher-ranked things. See the `doc` module at
//! the end of the file for details.
use super::{CombinedSnapshot, InferCtxt, HigherRankedType, SkolemizationMap};
use super::{CombinedSnapshot,
InferCtxt,
LateBoundRegion,
HigherRankedType,
SubregionOrigin,
SkolemizationMap};
use super::combine::CombineFields;
use super::region_inference::{TaintDirections};
use infer::error_reporting;
use ty::{self, TyCtxt, Binder, TypeFoldable};
use ty::error::TypeError;
use ty::relate::{Relate, RelateResult, TypeRelation};
@ -39,11 +46,13 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// Start a snapshot so we can examine "all bindings that were
// created as part of this type comparison".
return self.infcx.commit_if_ok(|snapshot| {
let span = self.trace.origin.span();
// First, we instantiate each bound region in the subtype with a fresh
// region variable.
let (a_prime, _) =
self.infcx.replace_late_bound_regions_with_fresh_var(
self.trace.origin.span(),
span,
HigherRankedType,
a);
@ -60,7 +69,11 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// Presuming type comparison succeeds, we need to check
// that the skolemized regions do not "leak".
self.infcx.leak_check(!self.a_is_expected, &skol_map, snapshot)?;
self.infcx.leak_check(!self.a_is_expected, span, &skol_map, snapshot)?;
// We are finished with the skolemized regions now so pop
// them off.
self.infcx.pop_skolemized(skol_map, snapshot);
debug!("higher_ranked_sub: OK result={:?}", result);
@ -124,7 +137,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
return r0;
}
let tainted = infcx.tainted_regions(snapshot, r0);
let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
@ -219,7 +232,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
return r0;
}
let tainted = infcx.tainted_regions(snapshot, r0);
let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
let mut a_r = None;
let mut b_r = None;
@ -341,8 +354,12 @@ fn fold_regions_in<'a, 'gcx, 'tcx, T, F>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec<ty::Region> {
self.region_vars.tainted(&snapshot.region_vars_snapshot, r)
fn tainted_regions(&self,
snapshot: &CombinedSnapshot,
r: ty::Region,
directions: TaintDirections)
-> FnvHashSet<ty::Region> {
self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions)
}
fn region_vars_confined_to_snapshot(&self,
@ -422,22 +439,27 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
region_vars
}
/// Replace all regions bound by `binder` with skolemized regions and
/// return a map indicating which bound-region was replaced with what
/// skolemized region. This is the first step of checking subtyping
/// when higher-ranked things are involved.
///
/// **Important:** you must call this function from within a snapshot.
/// Moreover, before committing the snapshot, you must eventually call
/// either `plug_leaks` or `pop_skolemized` to remove the skolemized
/// regions. If you rollback the snapshot (or are using a probe), then
/// the pop occurs as part of the rollback, so an explicit call is not
/// needed (but is also permitted).
///
/// See `README.md` for more details.
pub fn skolemize_late_bound_regions<T>(&self,
binder: &ty::Binder<T>,
snapshot: &CombinedSnapshot)
-> (T, SkolemizationMap)
where T : TypeFoldable<'tcx>
{
/*!
* Replace all regions bound by `binder` with skolemized regions and
* return a map indicating which bound-region was replaced with what
* skolemized region. This is the first step of checking subtyping
* when higher-ranked things are involved. See `README.md` for more
* details.
*/
let (result, map) = self.tcx.replace_late_bound_regions(binder, |br| {
self.region_vars.new_skolemized(br, &snapshot.region_vars_snapshot)
self.region_vars.push_skolemized(br, &snapshot.region_vars_snapshot)
});
debug!("skolemize_bound_regions(binder={:?}, result={:?}, map={:?})",
@ -448,27 +470,29 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
(result, map)
}
/// Searches the region constraints created since `snapshot` was started
/// and checks to determine whether any of the skolemized regions created
/// in `skol_map` would "escape" -- meaning that they are related to
/// other regions in some way. If so, the higher-ranked subtyping doesn't
/// hold. See `README.md` for more details.
pub fn leak_check(&self,
overly_polymorphic: bool,
span: Span,
skol_map: &SkolemizationMap,
snapshot: &CombinedSnapshot)
-> RelateResult<'tcx, ()>
{
/*!
* Searches the region constraints created since `snapshot` was started
* and checks to determine whether any of the skolemized regions created
* in `skol_map` would "escape" -- meaning that they are related to
* other regions in some way. If so, the higher-ranked subtyping doesn't
* hold. See `README.md` for more details.
*/
debug!("leak_check: skol_map={:?}",
skol_map);
let new_vars = self.region_vars_confined_to_snapshot(snapshot);
for (&skol_br, &skol) in skol_map {
let tainted = self.tainted_regions(snapshot, skol);
for &tainted_region in &tainted {
// The inputs to a skolemized variable can only
// be itself or other new variables.
let incoming_taints = self.tainted_regions(snapshot,
skol,
TaintDirections::incoming());
for &tainted_region in &incoming_taints {
// Each skolemized should only be relatable to itself
// or new variables:
match tainted_region {
@ -496,6 +520,72 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
}
}
}
for (_, &skol) in skol_map {
// The outputs from a skolemized variable must all be
// equatable with `'static`.
let outgoing_taints = self.tainted_regions(snapshot,
skol,
TaintDirections::outgoing());
for &tainted_region in &outgoing_taints {
match tainted_region {
ty::ReVar(vid) if new_vars.contains(&vid) => {
// There is a path from a skolemized variable
// to some region variable that doesn't escape
// this snapshot:
//
// [skol] -> [tainted_region]
//
// We can ignore this. The reasoning relies on
// the fact that the previous loop
// completed. There are two possible cases
// here.
//
// - `tainted_region` eventually reaches a
// skolemized variable, which *must* be `skol`
// (because otherwise we would have already
// returned `Err`). In that case,
// `tainted_region` could be inferred to `skol`.
//
// - `tainted_region` never reaches a
// skolemized variable. In that case, we can
// safely choose `'static` as an upper bound
// incoming edges. This is a conservative
// choice -- the LUB might be one of the
// incoming skolemized variables, which we
// might know by ambient bounds. We can
// consider a more clever choice of upper
// bound later (modulo some theoretical
// breakage).
//
// We used to force such `tainted_region` to be
// `'static`, but that leads to problems when
// combined with `plug_leaks`. If you have a case
// where `[skol] -> [tainted_region] -> [skol]`,
// then `plug_leaks` concludes it should replace
// `'static` with a late-bound region, which is
// clearly wrong. (Well, what actually happens is
// you get assertion failures because it WOULD
// have to replace 'static with a late-bound
// region.)
}
ty::ReSkolemized(..) => {
// the only skolemized region we find in the
// successors of X can be X; if there was another
// region Y, then X would have been in the preds
// of Y, and we would have aborted above
assert_eq!(skol, tainted_region);
}
_ => {
self.region_vars.make_subregion(
SubregionOrigin::SkolemizeSuccessor(span),
ty::ReStatic,
tainted_region);
}
}
}
}
Ok(())
}
@ -533,8 +623,6 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
value: &T) -> T
where T : TypeFoldable<'tcx>
{
debug_assert!(self.leak_check(false, &skol_map, snapshot).is_ok());
debug!("plug_leaks(skol_map={:?}, value={:?})",
skol_map,
value);
@ -545,9 +633,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
// these taint sets are mutually disjoint.
let inv_skol_map: FnvHashMap<ty::Region, ty::BoundRegion> =
skol_map
.into_iter()
.flat_map(|(skol_br, skol)| {
self.tainted_regions(snapshot, skol)
.iter()
.flat_map(|(&skol_br, &skol)| {
self.tainted_regions(snapshot, skol, TaintDirections::incoming())
.into_iter()
.map(move |tainted_region| (tainted_region, skol_br))
})
@ -577,6 +665,19 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
// binders, so this assert is satisfied.
assert!(current_depth > 1);
// since leak-check passed, this skolemized region
// should only have incoming edges from variables
// (which ought not to escape the snapshot, but we
// don't check that) or itself
assert!(
match r {
ty::ReVar(_) => true,
ty::ReSkolemized(_, ref br1) => br == br1,
_ => false,
},
"leak-check would have us replace {:?} with {:?}",
r, br);
ty::ReLateBound(ty::DebruijnIndex::new(current_depth - 1), br.clone())
}
}
@ -585,6 +686,27 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
debug!("plug_leaks: result={:?}",
result);
self.pop_skolemized(skol_map, snapshot);
debug!("plug_leaks: result={:?}", result);
result
}
/// Pops the skolemized regions found in `skol_map` from the region
/// inference context. Whenever you create skolemized regions via
/// `skolemize_late_bound_regions`, they must be popped before you
/// commit the enclosing snapshot (if you do not commit, e.g. within a
/// probe or as a result of an error, then this is not necessary, as
/// popping happens as part of the rollback).
///
/// Note: popping also occurs implicitly as part of `leak_check`.
pub fn pop_skolemized(&self,
skol_map: SkolemizationMap,
snapshot: &CombinedSnapshot)
{
debug!("pop_skolemized({:?})", skol_map);
let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect();
self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot);
}
}

View file

@ -340,6 +340,11 @@ pub enum SubregionOrigin<'tcx> {
// Region constraint arriving from destructor safety
SafeDestructor(Span),
// When doing a higher-ranked comparison, this region was a
// successor from a skolemized region, which means that it must be
// `'static` to be sound.
SkolemizeSuccessor(Span),
}
/// Places that type/region parameters can appear.
@ -538,7 +543,7 @@ impl<T> ExpectedFound<T> {
}
impl<'tcx, T> InferOk<'tcx, T> {
fn unit(self) -> InferOk<'tcx, ()> {
pub fn unit(self) -> InferOk<'tcx, ()> {
InferOk { value: (), obligations: self.obligations }
}
}
@ -1076,7 +1081,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
self.skolemize_late_bound_regions(predicate, snapshot);
let origin = TypeOrigin::EquatePredicate(span);
let eqty_ok = self.eq_types(false, origin, a, b)?;
self.leak_check(false, &skol_map, snapshot).map(|_| eqty_ok.unit())
self.leak_check(false, span, &skol_map, snapshot)?;
self.pop_skolemized(skol_map, snapshot);
Ok(eqty_ok.unit())
})
}
@ -1090,7 +1097,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
self.skolemize_late_bound_regions(predicate, snapshot);
let origin = RelateRegionParamBound(span);
self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b`
self.leak_check(false, &skol_map, snapshot)
self.leak_check(false, span, &skol_map, snapshot)?;
Ok(self.pop_skolemized(skol_map, snapshot))
})
}
@ -1790,6 +1798,7 @@ impl<'tcx> SubregionOrigin<'tcx> {
AddrOf(a) => a,
AutoBorrow(a) => a,
SafeDestructor(a) => a,
SkolemizeSuccessor(a) => a,
}
}
}

View file

@ -20,6 +20,7 @@ pub use self::VarValue::*;
use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable};
use super::unify_key;
use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
use rustc_data_structures::unify::{self, UnificationTable};
use middle::free_region::FreeRegionMap;
@ -27,12 +28,11 @@ use ty::{self, Ty, TyCtxt};
use ty::{BoundRegion, Region, RegionVid};
use ty::{ReEmpty, ReStatic, ReFree, ReEarlyBound};
use ty::{ReLateBound, ReScope, ReVar, ReSkolemized, BrFresh};
use util::common::indenter;
use util::nodemap::{FnvHashMap, FnvHashSet};
use std::cell::{Cell, RefCell};
use std::cmp::Ordering::{self, Less, Greater, Equal};
use std::fmt;
use std::mem;
use std::u32;
use syntax::ast;
@ -108,13 +108,36 @@ pub struct TwoRegions {
/// One entry in the region-inference undo log. Rolling back a
/// snapshot undoes these entries in reverse order.
#[derive(Copy, Clone, PartialEq)]
pub enum UndoLogEntry {
/// Pushed when we start a snapshot.
OpenSnapshot,
/// Replaces an `OpenSnapshot` when a snapshot is committed, but
/// that snapshot is not the root. If the root snapshot is
/// unrolled, all nested snapshots must be committed.
CommitedSnapshot,
/// We added `RegionVid`
AddVar(RegionVid),
/// We added the given `constraint`
AddConstraint(Constraint),
/// We added the given `verify`
AddVerify(usize),
/// We added the given `given`
AddGiven(ty::FreeRegion, ty::RegionVid),
/// We added a GLB/LUB "combination variable"
AddCombination(CombineMapType, TwoRegions),
/// During skolemization, we sometimes purge entries from the undo
/// log in a kind of minisnapshot (unlike other snapshots, this
/// purging actually takes place *on success*). In that case, we
/// replace the corresponding entry with `Purged` so as to avoid the
/// need to do a bunch of swapping. (We can't use `swap_remove` as
/// the order of the vector is important.)
Purged,
}
#[derive(Copy, Clone, PartialEq)]
@ -253,6 +276,116 @@ pub struct RegionSnapshot {
skolemization_count: u32,
}
/// When working with skolemized regions, we often wish to find all of
/// the regions that are either reachable from a skolemized region, or
/// which can reach a skolemized region, or both. We call such regions
/// *tainted* regions. This struct allows you to decide what set of
/// tainted regions you want.
#[derive(Debug)]
pub struct TaintDirections {
incoming: bool, // include regions with edges *into* the start region
outgoing: bool, // include regions reachable *from* the start region
}
impl TaintDirections {
/// Taint only regions with edges *into* the start region
/// (i.e. the set `{r | r <= r0}`).
pub fn incoming() -> Self {
TaintDirections { incoming: true, outgoing: false }
}
/// Taint only regions reachable *from* the start region
/// (i.e. the set `{r | r0 <= r}`).
pub fn outgoing() -> Self {
TaintDirections { incoming: false, outgoing: true }
}
/// Taint regions related to the start region in either direction.
pub fn both() -> Self {
TaintDirections { incoming: true, outgoing: true }
}
}
/// Worklist state for `tainted()`: the set of regions found so far,
/// plus which edge directions to follow when growing it.
struct TaintSet {
directions: TaintDirections,
regions: FnvHashSet<ty::Region>
}
impl TaintSet {
/// Creates a taint set seeded with just `initial_region`; grow it
/// afterwards with `fixed_point`.
fn new(directions: TaintDirections,
initial_region: ty::Region)
-> Self {
let mut regions = FnvHashSet();
regions.insert(initial_region);
TaintSet { directions: directions, regions: regions }
}
/// Repeatedly scans the region relations recorded in `undo_log`
/// (and the `verifys` entries they reference), adding regions along
/// the requested edge directions, until the set stops growing.
fn fixed_point(&mut self,
undo_log: &[UndoLogEntry],
verifys: &[Verify]) {
let mut prev_len = 0;
while prev_len < self.len() {
debug!("tainted: prev_len = {:?} new_len = {:?}",
prev_len, self.len());
prev_len = self.len();
for undo_entry in undo_log {
match undo_entry {
&AddConstraint(ConstrainVarSubVar(a, b)) => {
self.add_edge(ReVar(a), ReVar(b));
}
&AddConstraint(ConstrainRegSubVar(a, b)) => {
self.add_edge(a, ReVar(b));
}
&AddConstraint(ConstrainVarSubReg(a, b)) => {
self.add_edge(ReVar(a), b);
}
&AddGiven(a, b) => {
self.add_edge(ReFree(a), ReVar(b));
}
&AddVerify(i) => {
match verifys[i] {
VerifyRegSubReg(_, a, b) => {
self.add_edge(a, b);
}
VerifyGenericBound(_, _, a, ref bound) => {
bound.for_each_region(&mut |b| {
self.add_edge(a, b);
});
}
}
}
// these entries do not relate two regions
&Purged |
&AddCombination(..) |
&AddVar(..) |
&OpenSnapshot |
&CommitedSnapshot => {}
}
}
}
}
/// Consumes the set, yielding the tainted regions.
fn into_set(self) -> FnvHashSet<ty::Region> {
self.regions
}
fn len(&self) -> usize {
self.regions.len()
}
/// Records an edge `source <= target`. Whether this taints `source`,
/// `target`, or neither depends on `self.directions` and on which
/// endpoints are already in the set.
fn add_edge(&mut self,
source: ty::Region,
target: ty::Region) {
if self.directions.incoming {
if self.regions.contains(&target) {
self.regions.insert(source);
}
}
if self.directions.outgoing {
if self.regions.contains(&source) {
self.regions.insert(target);
}
}
}
}
impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> RegionVarBindings<'a, 'gcx, 'tcx> {
RegionVarBindings {
@ -290,6 +423,10 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
debug!("RegionVarBindings: commit({})", snapshot.length);
assert!(self.undo_log.borrow().len() > snapshot.length);
assert!((*self.undo_log.borrow())[snapshot.length] == OpenSnapshot);
assert!(self.skolemization_count.get() == snapshot.skolemization_count,
"failed to pop skolemized regions: {} now vs {} at start",
self.skolemization_count.get(),
snapshot.skolemization_count);
let mut undo_log = self.undo_log.borrow_mut();
if snapshot.length == 0 {
@ -297,7 +434,6 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
} else {
(*undo_log)[snapshot.length] = CommitedSnapshot;
}
self.skolemization_count.set(snapshot.skolemization_count);
self.unification_table.borrow_mut().commit(snapshot.region_snapshot);
}
@ -307,33 +443,7 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
assert!(undo_log.len() > snapshot.length);
assert!((*undo_log)[snapshot.length] == OpenSnapshot);
while undo_log.len() > snapshot.length + 1 {
match undo_log.pop().unwrap() {
OpenSnapshot => {
bug!("Failure to observe stack discipline");
}
CommitedSnapshot => {}
AddVar(vid) => {
let mut var_origins = self.var_origins.borrow_mut();
var_origins.pop().unwrap();
assert_eq!(var_origins.len(), vid.index as usize);
}
AddConstraint(ref constraint) => {
self.constraints.borrow_mut().remove(constraint);
}
AddVerify(index) => {
self.verifys.borrow_mut().pop();
assert_eq!(self.verifys.borrow().len(), index);
}
AddGiven(sub, sup) => {
self.givens.borrow_mut().remove(&(sub, sup));
}
AddCombination(Glb, ref regions) => {
self.glbs.borrow_mut().remove(regions);
}
AddCombination(Lub, ref regions) => {
self.lubs.borrow_mut().remove(regions);
}
}
self.rollback_undo_entry(undo_log.pop().unwrap());
}
let c = undo_log.pop().unwrap();
assert!(c == OpenSnapshot);
@ -342,6 +452,38 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
.rollback_to(snapshot.region_snapshot);
}
/// Undoes the effect of a single undo-log entry: removing the
/// variable, constraint, verify, given, or combination that the
/// entry recorded. Used both when rolling back a snapshot and when
/// purging skolemization-related entries (which are replaced with
/// `Purged` in the log).
pub fn rollback_undo_entry(&self, undo_entry: UndoLogEntry) {
match undo_entry {
OpenSnapshot => {
// snapshots must be closed innermost-first
panic!("Failure to observe stack discipline");
}
Purged | CommitedSnapshot => {
// nothing to do here
}
AddVar(vid) => {
let mut var_origins = self.var_origins.borrow_mut();
var_origins.pop().unwrap();
assert_eq!(var_origins.len(), vid.index as usize);
}
AddConstraint(ref constraint) => {
self.constraints.borrow_mut().remove(constraint);
}
AddVerify(index) => {
self.verifys.borrow_mut().pop();
assert_eq!(self.verifys.borrow().len(), index);
}
AddGiven(sub, sup) => {
self.givens.borrow_mut().remove(&(sub, sup));
}
AddCombination(Glb, ref regions) => {
self.glbs.borrow_mut().remove(regions);
}
AddCombination(Lub, ref regions) => {
self.lubs.borrow_mut().remove(regions);
}
}
}
pub fn num_vars(&self) -> u32 {
let len = self.var_origins.borrow().len();
// enforce no overflow
@ -366,22 +508,30 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
return vid;
}
pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
self.var_origins.borrow()[vid.index as usize].clone()
}
/// Creates a new skolemized region. Skolemized regions are fresh
/// regions used when performing higher-ranked computations. They
/// must be used in a very particular way and are never supposed
/// to "escape" out into error messages or the code at large.
///
/// The idea is to always create a snapshot. Skolemized regions
/// can be created in the context of this snapshot, but once the
/// snapshot is committed or rolled back, their numbers will be
/// recycled, so you must be finished with them. See the extensive
/// comments in `higher_ranked.rs` to see how it works (in
/// particular, the subtyping comparison).
/// can be created in the context of this snapshot, but before the
/// snapshot is committed or rolled back, they must be popped
/// (using `pop_skolemized_regions`), so that their numbers can be
/// recycled. Normally you don't have to think about this: you use
/// the APIs in `higher_ranked/mod.rs`, such as
/// `skolemize_late_bound_regions` and `plug_leaks`, which will
/// guide you on this path (ensure that the `SkolemizationMap` is
/// consumed and you are good). There are also somewhat extensive
/// comments in `higher_ranked/README.md`.
///
/// The `snapshot` argument to this function is not really used;
/// it's just there to make it explicit which snapshot bounds the
/// skolemized region that results.
pub fn new_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) -> Region {
/// skolemized region that results. It should always be the top-most snapshot.
pub fn push_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) -> Region {
assert!(self.in_snapshot());
assert!(self.undo_log.borrow()[snapshot.length] == OpenSnapshot);
@ -390,6 +540,92 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
ReSkolemized(ty::SkolemizedRegionVid { index: sc }, br)
}
/// Removes all the edges to/from the skolemized regions that are
/// in `skols`. This is used after a higher-ranked operation
/// completes to remove all trace of the skolemized regions
/// created in that time.
pub fn pop_skolemized(&self,
skols: &FnvHashSet<ty::Region>,
snapshot: &RegionSnapshot) {
// NB: log message previously said `pop_skolemized_regions`, which
// is not the name of this function.
debug!("pop_skolemized(skols={:?})", skols);
assert!(self.in_snapshot());
assert!(self.undo_log.borrow()[snapshot.length] == OpenSnapshot);
assert!(self.skolemization_count.get() as usize >= skols.len(),
"popping more skolemized variables than actually exist, \
sc now = {}, skols.len = {}",
self.skolemization_count.get(),
skols.len());
// Skolemization counters are handed out sequentially, so the
// regions being popped must be exactly the most recent
// `skols.len()` counters, all created under `snapshot`.
let last_to_pop = self.skolemization_count.get();
let first_to_pop = last_to_pop - (skols.len() as u32);
assert!(first_to_pop >= snapshot.skolemization_count,
"popping more regions than snapshot contains, \
sc now = {}, sc then = {}, skols.len = {}",
self.skolemization_count.get(),
snapshot.skolemization_count,
skols.len());
debug_assert! {
skols.iter()
.all(|k| match *k {
ty::ReSkolemized(index, _) =>
index.index >= first_to_pop &&
index.index < last_to_pop,
_ =>
false
}),
"invalid skolemization keys or keys out of range ({}..{}): {:?}",
snapshot.skolemization_count,
self.skolemization_count.get(),
skols
}
// Replace each undo-log entry touching a skolemized region with
// `Purged` (rather than removing it, which would shift later
// indices) and undo its effect right away.
let mut undo_log = self.undo_log.borrow_mut();
let constraints_to_kill: Vec<usize> =
undo_log.iter()
.enumerate()
.rev()
.filter(|&(_, undo_entry)| kill_constraint(skols, undo_entry))
.map(|(index, _)| index)
.collect();
for index in constraints_to_kill {
let undo_entry = mem::replace(&mut undo_log[index], Purged);
self.rollback_undo_entry(undo_entry);
}
self.skolemization_count.set(snapshot.skolemization_count);
return;
// True if `undo_entry` mentions one of the regions in `skols`
// and hence must be purged. Var-var constraints never qualify:
// `skols` holds only `ReSkolemized` regions (see the
// debug_assert above), never inference variables.
fn kill_constraint(skols: &FnvHashSet<ty::Region>,
undo_entry: &UndoLogEntry)
-> bool {
match undo_entry {
&AddConstraint(ConstrainVarSubVar(_, _)) =>
false,
&AddConstraint(ConstrainRegSubVar(a, _)) =>
skols.contains(&a),
&AddConstraint(ConstrainVarSubReg(_, b)) =>
skols.contains(&b),
&AddGiven(_, _) =>
false,
&AddVerify(_) =>
false,
&AddCombination(_, ref two_regions) =>
skols.contains(&two_regions.a) ||
skols.contains(&two_regions.b),
&AddVar(..) |
&OpenSnapshot |
&Purged |
&CommitedSnapshot =>
false,
}
}
}
pub fn new_bound(&self, debruijn: ty::DebruijnIndex) -> Region {
// Creates a fresh bound variable for use in GLB computations.
// See discussion of GLB computation in the large comment at
@ -632,83 +868,30 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
.collect()
}
/// Computes all regions that have been related to `r0` in any way since the mark `mark` was
/// made---`r0` itself will be the first entry. This is used when checking whether skolemized
/// regions are being improperly related to other regions.
pub fn tainted(&self, mark: &RegionSnapshot, r0: Region) -> Vec<Region> {
debug!("tainted(mark={:?}, r0={:?})", mark, r0);
let _indenter = indenter();
/// Computes all regions that have been related to `r0` since the
/// mark `mark` was made---`r0` itself will be the first
/// entry. The `directions` parameter controls what kind of
/// relations are considered. For example, one can say that only
/// "incoming" edges to `r0` are desired, in which case one will
/// get the set of regions `{r|r <= r0}`. This is used when
/// checking whether skolemized regions are being improperly
/// related to other regions.
pub fn tainted(&self,
mark: &RegionSnapshot,
r0: Region,
directions: TaintDirections)
-> FnvHashSet<ty::Region> {
debug!("tainted(mark={:?}, r0={:?}, directions={:?})",
mark, r0, directions);
// `result_set` acts as a worklist: we explore all outgoing
// edges and add any new regions we find to result_set. This
// is not a terribly efficient implementation.
let mut result_set = vec![r0];
let mut result_index = 0;
while result_index < result_set.len() {
// nb: can't use usize::range() here because result_set grows
let r = result_set[result_index];
debug!("result_index={}, r={:?}", result_index, r);
for undo_entry in self.undo_log.borrow()[mark.length..].iter() {
match undo_entry {
&AddConstraint(ConstrainVarSubVar(a, b)) => {
consider_adding_bidirectional_edges(&mut result_set, r, ReVar(a), ReVar(b));
}
&AddConstraint(ConstrainRegSubVar(a, b)) => {
consider_adding_bidirectional_edges(&mut result_set, r, a, ReVar(b));
}
&AddConstraint(ConstrainVarSubReg(a, b)) => {
consider_adding_bidirectional_edges(&mut result_set, r, ReVar(a), b);
}
&AddGiven(a, b) => {
consider_adding_bidirectional_edges(&mut result_set,
r,
ReFree(a),
ReVar(b));
}
&AddVerify(i) => {
match (*self.verifys.borrow())[i] {
VerifyRegSubReg(_, a, b) => {
consider_adding_bidirectional_edges(&mut result_set, r, a, b);
}
VerifyGenericBound(_, _, a, ref bound) => {
bound.for_each_region(&mut |b| {
consider_adding_bidirectional_edges(&mut result_set, r, a, b)
});
}
}
}
&AddCombination(..) |
&AddVar(..) |
&OpenSnapshot |
&CommitedSnapshot => {}
}
}
result_index += 1;
}
return result_set;
fn consider_adding_bidirectional_edges(result_set: &mut Vec<Region>,
r: Region,
r1: Region,
r2: Region) {
consider_adding_directed_edge(result_set, r, r1, r2);
consider_adding_directed_edge(result_set, r, r2, r1);
}
fn consider_adding_directed_edge(result_set: &mut Vec<Region>,
r: Region,
r1: Region,
r2: Region) {
if r == r1 {
// Clearly, this is potentially inefficient.
if !result_set.iter().any(|x| *x == r2) {
result_set.push(r2);
}
}
}
let mut taint_set = TaintSet::new(directions, r0);
taint_set.fixed_point(&self.undo_log.borrow()[mark.length..],
&self.verifys.borrow());
debug!("tainted: result={:?}", taint_set.regions);
return taint_set.into_set();
}
/// This function performs the actual region resolution. It must be

View file

@ -182,7 +182,8 @@ pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>(
let skol_obligation = obligation.with(skol_predicate);
match project_and_unify_type(selcx, &skol_obligation) {
Ok(result) => {
match infcx.leak_check(false, &skol_map, snapshot) {
let span = obligation.cause.span;
match infcx.leak_check(false, span, &skol_map, snapshot) {
Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, &result)),
Err(e) => Err(MismatchedProjectionTypes { err: e }),
}
@ -404,7 +405,11 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
depth: usize)
-> Option<NormalizedTy<'tcx>>
{
debug!("normalize_projection_type(\
let infcx = selcx.infcx();
let projection_ty = infcx.resolve_type_vars_if_possible(&projection_ty);
debug!("opt_normalize_projection_type(\
projection_ty={:?}, \
depth={})",
projection_ty,
@ -418,7 +423,8 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
// an impl, where-clause etc) and hence we must
// re-normalize it
debug!("normalize_projection_type: projected_ty={:?} depth={} obligations={:?}",
debug!("opt_normalize_projection_type: \
projected_ty={:?} depth={} obligations={:?}",
projected_ty,
depth,
obligations);
@ -427,7 +433,8 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth+1);
let normalized_ty = normalizer.fold(&projected_ty);
debug!("normalize_projection_type: normalized_ty={:?} depth={}",
debug!("opt_normalize_projection_type: \
normalized_ty={:?} depth={}",
normalized_ty,
depth);
@ -444,7 +451,8 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
}
}
Ok(ProjectedTy::NoProgress(projected_ty)) => {
debug!("normalize_projection_type: projected_ty={:?} no progress",
debug!("opt_normalize_projection_type: \
projected_ty={:?} no progress",
projected_ty);
Some(Normalized {
value: projected_ty,
@ -452,11 +460,12 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
})
}
Err(ProjectionTyError::TooManyCandidates) => {
debug!("normalize_projection_type: too many candidates");
debug!("opt_normalize_projection_type: \
too many candidates");
None
}
Err(ProjectionTyError::TraitSelectionError(_)) => {
debug!("normalize_projection_type: ERROR");
debug!("opt_normalize_projection_type: ERROR");
// if we got an error processing the `T as Trait` part,
// just return `ty::err` but add the obligation `T :
// Trait`, which when processed will cause the error to be

View file

@ -46,6 +46,7 @@ use rustc_data_structures::snapshot_vec::{SnapshotVecDelegate, SnapshotVec};
use std::cell::RefCell;
use std::fmt;
use std::marker::PhantomData;
use std::mem;
use std::rc::Rc;
use syntax::abi::Abi;
use hir;
@ -1237,6 +1238,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
skol_trait_predicate.trait_ref.clone(),
&skol_map,
snapshot);
self.infcx.pop_skolemized(skol_map, snapshot);
assert!(result);
true
}
@ -1263,7 +1267,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
Err(_) => { return false; }
}
self.infcx.leak_check(false, skol_map, snapshot).is_ok()
self.infcx.leak_check(false, obligation.cause.span, skol_map, snapshot).is_ok()
}
/// Given an obligation like `<SomeTrait for T>`, search the obligations that the caller
@ -1422,9 +1426,16 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
self.tcx(),
obligation.predicate.0.trait_ref.self_ty(),
|impl_def_id| {
self.probe(|this, snapshot| {
if let Ok(_) = this.match_impl(impl_def_id, obligation, snapshot) {
candidates.vec.push(ImplCandidate(impl_def_id));
self.probe(|this, snapshot| { /* [1] */
match this.match_impl(impl_def_id, obligation, snapshot) {
Ok(skol_map) => {
candidates.vec.push(ImplCandidate(impl_def_id));
// NB: we can safely drop the skol map
// since we are in a probe [1]
mem::drop(skol_map);
}
Err(_) => { }
}
});
}
@ -1509,9 +1520,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
return;
}
self.probe(|this, snapshot| {
let (self_ty, _) =
this.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot);
self.probe(|this, _snapshot| {
// the code below doesn't care about regions, and the
// self-ty here doesn't escape this probe, so just erase
// any LBR.
let self_ty = this.tcx().erase_late_bound_regions(&obligation.self_ty());
let poly_trait_ref = match self_ty.sty {
ty::TyTrait(ref data) => {
match this.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
@ -2710,7 +2723,10 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
})?;
self.inferred_obligations.extend(obligations);
if let Err(e) = self.infcx.leak_check(false, &skol_map, snapshot) {
if let Err(e) = self.infcx.leak_check(false,
obligation.cause.span,
&skol_map,
snapshot) {
debug!("match_impl: failed leak check due to `{}`", e);
return Err(());
}

View file

@ -9,7 +9,7 @@
// except according to those terms.
use middle::free_region::FreeRegionMap;
use rustc::infer::{self, InferOk, TypeOrigin};
use rustc::infer::{self, InferOk, InferResult, TypeOrigin};
use rustc::ty;
use rustc::traits::{self, ProjectionMode};
use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace};

View file

@ -0,0 +1,116 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Targeted tests for the higher-ranked subtyping code.
#![feature(rustc_attrs)]
#![allow(dead_code)]
// revisions: bound_a_vs_bound_a
// revisions: bound_a_vs_bound_b
// revisions: bound_inv_a_vs_bound_inv_b
// revisions: bound_co_a_vs_bound_co_b
// revisions: bound_a_vs_free_x
// revisions: free_x_vs_free_x
// revisions: free_x_vs_free_y
// revisions: free_inv_x_vs_free_inv_y
// revisions: bound_a_b_vs_bound_a
// revisions: bound_co_a_b_vs_bound_co_a
// revisions: bound_contra_a_contra_b_ret_co_a
// revisions: bound_co_a_co_b_ret_contra_a
// revisions: bound_inv_a_b_vs_bound_inv_a
// revisions: bound_a_b_ret_a_vs_bound_a_ret_a
/// Accepts and discards an `Option<T>`; exists only so that the call
/// site must unify the argument's type with the requested `T`.
fn gimme<T>(_: Option<T>) {}
// Helper types that give `'a` a specific variance in the checks below;
// the names record the intended variance (invariant, covariant,
// contravariant) as computed by variance inference for the field type.
struct Inv<'a> { x: *mut &'a u32 }
struct Co<'a> { x: fn(&'a u32) }
struct Contra<'a> { x: &'a u32 }
// For each revision `$rev`, generate two functions that relate `$t1`
// and `$t2` in both directions through `gimme`:
//  - `subtype` succeeds only if `$t1` is a subtype of `$t2`
//    (passing `Option<$t1>` where `Option<$t2>` is expected);
//  - `supertype` checks the reverse direction (`$t2 <: $t1`).
// The `//[rev]~^ ERROR` annotations list the revisions expected to
// fail; they are offset-sensitive, so they must stay directly below
// the `gimme` call they point at.
macro_rules! check {
    ($rev:ident: ($t1:ty, $t2:ty)) => {
        // Compiles only when `$t1 <: $t2`.
        #[cfg($rev)]
        fn subtype<'x,'y:'x,'z:'y>() {
            gimme::<$t2>(None::<$t1>);
            //[free_inv_x_vs_free_inv_y]~^ ERROR mismatched types
        }
        // Compiles only when `$t2 <: $t1`.
        #[cfg($rev)]
        fn supertype<'x,'y:'x,'z:'y>() {
            gimme::<$t1>(None::<$t2>);
            //[bound_a_vs_free_x]~^ ERROR mismatched types
            //[free_x_vs_free_y]~^^ ERROR mismatched types
            //[bound_inv_a_b_vs_bound_inv_a]~^^^ ERROR mismatched types
            //[bound_a_b_ret_a_vs_bound_a_ret_a]~^^^^ ERROR mismatched types
            //[free_inv_x_vs_free_inv_y]~^^^^^ ERROR mismatched types
        }
    }
}
// If both have bound regions, they are equivalent, regardless of
// variance.
check! { bound_a_vs_bound_a: (for<'a> fn(&'a u32),
                              for<'a> fn(&'a u32)) }
check! { bound_a_vs_bound_b: (for<'a> fn(&'a u32),
                              for<'b> fn(&'b u32)) }
check! { bound_inv_a_vs_bound_inv_b: (for<'a> fn(Inv<'a>),
                                      for<'b> fn(Inv<'b>)) }
check! { bound_co_a_vs_bound_co_b: (for<'a> fn(Co<'a>),
                                    for<'b> fn(Co<'b>)) }
// Bound is a subtype of free.
check! { bound_a_vs_free_x: (for<'a> fn(&'a u32),
                             fn(&'x u32)) }
// Two free regions are relatable if subtyping holds.
check! { free_x_vs_free_x: (fn(&'x u32),
                            fn(&'x u32)) }
check! { free_x_vs_free_y: (fn(&'x u32),
                            fn(&'y u32)) }
check! { free_inv_x_vs_free_inv_y: (fn(Inv<'x>),
                                    fn(Inv<'y>)) }
// Somewhat surprisingly, a fn taking two distinct bound lifetimes and
// a fn taking one bound lifetime can be interchangeable, but only if
// we are co- or contra-variant with respect to both lifetimes.
//
// The reason is:
// - if we are covariant, then 'a and 'b can be set to the call-site
//   intersection;
// - if we are contravariant, then 'a can be inferred to 'static.
check! { bound_a_b_vs_bound_a: (for<'a,'b> fn(&'a u32, &'b u32),
                                for<'a> fn(&'a u32, &'a u32)) }
check! { bound_co_a_b_vs_bound_co_a: (for<'a,'b> fn(Co<'a>, Co<'b>),
                                      for<'a> fn(Co<'a>, Co<'a>)) }
check! { bound_contra_a_contra_b_ret_co_a: (for<'a,'b> fn(Contra<'a>, Contra<'b>) -> Co<'a>,
                                            for<'a> fn(Contra<'a>, Contra<'a>) -> Co<'a>) }
check! { bound_co_a_co_b_ret_contra_a: (for<'a,'b> fn(Co<'a>, Co<'b>) -> Contra<'a>,
                                        for<'a> fn(Co<'a>, Co<'a>) -> Contra<'a>) }
// If we make those lifetimes invariant, then the two types are not interchangeable.
check! { bound_inv_a_b_vs_bound_inv_a: (for<'a,'b> fn(Inv<'a>, Inv<'b>),
                                        for<'a> fn(Inv<'a>, Inv<'a>)) }
check! { bound_a_b_ret_a_vs_bound_a_ret_a: (for<'a,'b> fn(&'a u32, &'b u32) -> &'a u32,
                                            for<'a> fn(&'a u32, &'a u32) -> &'a u32) }
// `#[rustc_error]` reports "compilation successful" at `main` when no
// other error occurred; the offset-sensitive annotations below name
// the revisions that are expected to compile cleanly.
#[rustc_error]
fn main() {
//[bound_a_vs_bound_a]~^ ERROR compilation successful
//[bound_a_vs_bound_b]~^^ ERROR compilation successful
//[bound_inv_a_vs_bound_inv_b]~^^^ ERROR compilation successful
//[bound_co_a_vs_bound_co_b]~^^^^ ERROR compilation successful
//[free_x_vs_free_x]~^^^^^ ERROR compilation successful
//[bound_a_b_vs_bound_a]~^^^^^^ ERROR compilation successful
//[bound_co_a_b_vs_bound_co_a]~^^^^^^^ ERROR compilation successful
//[bound_contra_a_contra_b_ret_co_a]~^^^^^^^^ ERROR compilation successful
//[bound_co_a_co_b_ret_contra_a]~^^^^^^^^^ ERROR compilation successful
}

View file

@ -28,8 +28,7 @@ fn make_object2<'a,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'a> {
fn make_object3<'a,'b,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'b> {
box v as Box<SomeTrait+'b>
//~^ ERROR the parameter type `A` may not live long enough
//~^^ ERROR the parameter type `A` may not live long enough
//~^ ERROR E0478
}
fn main() { }

View file

@ -15,10 +15,10 @@ trait Contravariant {
fn foo(&self) { }
}
impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) {
impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
}
impl Contravariant for for<'a> fn(&'a u8, &'a u8) {
impl Contravariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
}
///////////////////////////////////////////////////////////////////////////
@ -27,10 +27,10 @@ trait Covariant {
fn foo(&self) { }
}
impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) {
impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
}
impl Covariant for for<'a> fn(&'a u8, &'a u8) {
impl Covariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
}
///////////////////////////////////////////////////////////////////////////
@ -39,10 +39,10 @@ trait Invariant {
fn foo(&self) { }
}
impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) {
impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
}
impl Invariant for for<'a> fn(&'a u8, &'a u8) {
impl Invariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
}
fn main() { }