rustc: Implement and enforce instance coherence

This commit is contained in:
Patrick Walton 2012-07-11 15:00:40 -07:00
parent b5729bd600
commit db020ab63c
111 changed files with 1746 additions and 526 deletions

View file

@ -1,6 +1,6 @@
// -*- rust -*-
import metadata::{creader, cstore, filesearch};
import session::session;
import session::{session, session_};
import syntax::parse;
import syntax::{ast, codemap};
import syntax::attr;
@ -168,7 +168,10 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
session::sess_os_to_meta_os(sess.targ_cfg.os),
sess.opts.static));
let { def_map: def_map, exp_map: exp_map, impl_map: impl_map } =
let { def_map: def_map,
exp_map: exp_map,
impl_map: impl_map,
trait_map: trait_map } =
time(time_passes, ~"resolution", ||
middle::resolve3::resolve_crate(sess, ast_map, crate));
@ -187,6 +190,7 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
let (method_map, vtable_map) = time(time_passes, ~"typechecking", ||
typeck::check_crate(ty_cx,
impl_map,
trait_map,
crate));
time(time_passes, ~"const checking", ||
@ -516,11 +520,12 @@ fn build_session(sopts: @session::options,
build_session_(sopts, codemap, demitter, span_diagnostic_handler)
}
fn build_session_(
sopts: @session::options, cm: codemap::codemap,
demitter: diagnostic::emitter,
span_diagnostic_handler: diagnostic::span_handler
) -> session {
fn build_session_(sopts: @session::options,
cm: codemap::codemap,
demitter: diagnostic::emitter,
span_diagnostic_handler: diagnostic::span_handler)
-> session {
let target_cfg = build_target_config(sopts, demitter);
let cstore = cstore::mk_cstore();
let filesearch = filesearch::mk_filesearch(
@ -528,19 +533,19 @@ fn build_session_(
sopts.target_triple,
sopts.addl_lib_search_paths);
let warning_settings = lint::mk_warning_settings();
@{targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess:
session_(@{targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess:
parse::new_parse_sess_special_handler(span_diagnostic_handler, cm),
codemap: cm,
// For a library crate, this is always none
mut main_fn: none,
span_diagnostic: span_diagnostic_handler,
filesearch: filesearch,
mut building_library: false,
working_dir: os::getcwd(),
warning_settings: warning_settings}
codemap: cm,
// For a library crate, this is always none
mut main_fn: none,
span_diagnostic: span_diagnostic_handler,
filesearch: filesearch,
mut building_library: false,
working_dir: os::getcwd(),
warning_settings: warning_settings})
}
fn parse_pretty(sess: session, &&name: ~str) -> pp_mode {

View file

@ -85,7 +85,7 @@ type options =
type crate_metadata = {name: ~str, data: ~[u8]};
type session = @{targ_cfg: @config,
type session_ = {targ_cfg: @config,
opts: @options,
cstore: metadata::cstore::cstore,
parse_sess: parse_sess,
@ -98,6 +98,10 @@ type session = @{targ_cfg: @config,
working_dir: ~str,
warning_settings: lint::warning_settings};
// Newtype enum wrapping @session_ (the record type above) so that `session`
// is a nominal type; the single unary variant `session_` acts as the
// constructor, and the `impl session for session` below hangs methods off it
// (pre-1.0 Rust idiom).
enum session {
session_(@session_)
}
impl session for session {
fn span_fatal(sp: span, msg: ~str) -> ! {
self.span_diagnostic.span_fatal(sp, msg)

View file

@ -10,6 +10,7 @@ import syntax::diagnostic::span_handler;
import syntax::diagnostic::expect;
import common::*;
import std::map::hashmap;
import dvec::{dvec, extensions};
export class_dtor;
export get_symbol;
@ -23,6 +24,7 @@ export lookup_method_purity;
export get_enum_variants;
export get_impls_for_mod;
export get_trait_methods;
export get_method_names_if_trait;
export each_path;
export get_type;
export get_impl_trait;
@ -140,6 +142,13 @@ fn get_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> @~[ty::method] {
decoder::get_trait_methods(cdata, def.node, tcx)
}
// Cross-crate front end: look up the metadata for `def`'s crate in the
// cstore and delegate to the decoder. Returns the trait's method names if
// `def` names a trait, or none otherwise (see decoder::get_method_names_if_trait).
fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id)
-> option<@dvec<@~str>> {
let cdata = cstore::get_crate_data(cstore, def.crate);
ret decoder::get_method_names_if_trait(cdata, def.node);
}
fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::field_ty] {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);

View file

@ -2,6 +2,7 @@
import std::{ebml, map};
import std::map::{hashmap, str_hash};
import dvec::{dvec, extensions};
import io::writer_util;
import syntax::{ast, ast_util};
import syntax::attr;
@ -37,6 +38,7 @@ export get_crate_hash;
export get_crate_vers;
export get_impls_for_mod;
export get_trait_methods;
export get_method_names_if_trait;
export get_crate_module_paths;
export def_like;
export dl_def;
@ -640,6 +642,23 @@ fn get_trait_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
@result
}
// If the item in question is a trait, returns its set of methods. Otherwise,
// returns none.
fn get_method_names_if_trait(cdata: cmd, node_id: ast::node_id)
-> option<@dvec<@~str>> {
let item = lookup_item(node_id, cdata.data);
// 'I' is the item-family code used here to mark a trait in the crate
// metadata; any other family means this item is not a trait.
if item_family(item) != 'I' {
ret none;
}
let resulting_method_names = @dvec();
// Walk every child doc tagged as a trait method and collect its name.
do ebml::tagged_docs(item, tag_item_trait_method) |method| {
(*resulting_method_names).push(item_name(method));
}
ret some(resulting_method_names);
}
// Helper function that gets either fields or methods
fn get_class_members(cdata: cmd, id: ast::node_id,
p: fn(char) -> bool) -> ~[ty::field_ty] {

View file

@ -65,12 +65,16 @@ type decode_ctxt = @{
maps: maps
};
type extended_decode_ctxt = @{
type extended_decode_ctxt_ = {
dcx: decode_ctxt,
from_id_range: ast_util::id_range,
to_id_range: ast_util::id_range
};
// Nominal wrapper around @extended_decode_ctxt_ (dcx + the id ranges being
// translated); the unary variant is the constructor, as used in
// decode_inlined_item below.
enum extended_decode_ctxt {
extended_decode_ctxt_(@extended_decode_ctxt_)
}
iface tr {
fn tr(xcx: extended_decode_ctxt) -> self;
}
@ -112,9 +116,9 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
let ast_dsr = ebml::ebml_deserializer(ast_doc);
let from_id_range = ast_util::deserialize_id_range(ast_dsr);
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
let xcx = @{dcx: dcx,
from_id_range: from_id_range,
to_id_range: to_id_range};
let xcx = extended_decode_ctxt_(@{dcx: dcx,
from_id_range: from_id_range,
to_id_range: to_id_range});
let raw_ii = decode_ast(ast_doc);
let ii = renumber_ast(xcx, raw_ii);
ast_map::map_decoded_item(tcx.sess.diagnostic(),
@ -182,13 +186,23 @@ impl of tr for span {
}
}
impl serializer_helpers<S: serializer> for S {
// Named trait for the def_id-emitting helper so the blanket impl below can
// declare what it implements (instance coherence requires an explicit trait).
trait def_id_serializer_helpers {
fn emit_def_id(did: ast::def_id);
}
// Blanket impl: any serializer S can emit a def_id by delegating to
// ast::serialize_def_id.
impl serializer_helpers<S: serializer> of def_id_serializer_helpers for S {
fn emit_def_id(did: ast::def_id) {
ast::serialize_def_id(self, did)
}
}
impl deserializer_helpers<D: deserializer> for D {
trait def_id_deserializer_helpers {
fn read_def_id(xcx: extended_decode_ctxt) -> ast::def_id;
}
impl deserializer_helpers<D: deserializer> of def_id_deserializer_helpers
for D {
fn read_def_id(xcx: extended_decode_ctxt) -> ast::def_id {
let did = ast::deserialize_def_id(self);
did.tr(xcx)
@ -370,7 +384,11 @@ fn encode_freevar_entry(ebml_w: ebml::writer, fv: freevar_entry) {
serialize_freevar_entry(ebml_w, fv)
}
impl helper for ebml::ebml_deserializer {
trait ebml_deserializer_helper {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry;
}
impl helper of ebml_deserializer_helper for ebml::ebml_deserializer {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry {
let fv = deserialize_freevar_entry(self);
fv.tr(xcx)
@ -386,7 +404,11 @@ impl of tr for freevar_entry {
// ______________________________________________________________________
// Encoding and decoding of method_map_entry
impl helper for ebml::ebml_deserializer {
trait read_method_map_entry_helper {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry;
}
impl helper of read_method_map_entry_helper for ebml::ebml_deserializer {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry {
let mme = deserialize_method_map_entry(self);
{derefs: mme.derefs, origin: mme.origin.tr(xcx)}
@ -412,7 +434,11 @@ impl of tr for method_origin {
// ______________________________________________________________________
// Encoding and decoding of borrow
impl helper for ebml::ebml_deserializer {
trait read_borrow_helper {
fn read_borrow(xcx: extended_decode_ctxt) -> ty::borrow;
}
impl helper of read_borrow_helper for ebml::ebml_deserializer {
fn read_borrow(xcx: extended_decode_ctxt) -> ty::borrow {
let borrow = ty::deserialize_borrow(self);
{scope_id: xcx.tr_id(borrow.scope_id),
@ -478,7 +504,12 @@ fn encode_vtable_origin(ecx: @e::encode_ctxt,
}
impl helpers for ebml::ebml_deserializer {
trait vtable_deserialization_helpers {
fn read_vtable_res(xcx: extended_decode_ctxt) -> typeck::vtable_res;
fn read_vtable_origin(xcx: extended_decode_ctxt) -> typeck::vtable_origin;
}
impl helpers of vtable_deserialization_helpers for ebml::ebml_deserializer {
fn read_vtable_res(xcx: extended_decode_ctxt) -> typeck::vtable_res {
@self.read_to_vec(|| self.read_vtable_origin(xcx) )
}
@ -530,7 +561,11 @@ impl helpers for ebml::ebml_deserializer {
// ______________________________________________________________________
// Encoding and decoding the side tables
impl helpers for @e::encode_ctxt {
trait get_ty_str_ctxt {
fn ty_str_ctxt() -> @tyencode::ctxt;
}
impl helpers of get_ty_str_ctxt for @e::encode_ctxt {
fn ty_str_ctxt() -> @tyencode::ctxt {
@{diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,
@ -540,7 +575,14 @@ impl helpers for @e::encode_ctxt {
}
}
impl helpers for ebml::writer {
trait ebml_writer_helpers {
fn emit_ty(ecx: @e::encode_ctxt, ty: ty::t);
fn emit_tys(ecx: @e::encode_ctxt, tys: ~[ty::t]);
fn emit_bounds(ecx: @e::encode_ctxt, bs: ty::param_bounds);
fn emit_tpbt(ecx: @e::encode_ctxt, tpbt: ty::ty_param_bounds_and_ty);
}
impl helpers of ebml_writer_helpers for ebml::writer {
fn emit_ty(ecx: @e::encode_ctxt, ty: ty::t) {
e::write_type(ecx, self, ty)
}
@ -572,7 +614,12 @@ impl helpers for ebml::writer {
}
}
impl writer for ebml::writer {
trait write_tag_and_id {
fn tag(tag_id: c::astencode_tag, f: fn());
fn id(id: ast::node_id);
}
impl writer of write_tag_and_id for ebml::writer {
fn tag(tag_id: c::astencode_tag, f: fn()) {
do self.wr_tag(tag_id as uint) { f() }
}
@ -724,7 +771,13 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
}
}
impl decoder for ebml::doc {
trait doc_decoder_helpers {
fn as_int() -> int;
fn [](tag: c::astencode_tag) -> ebml::doc;
fn opt_child(tag: c::astencode_tag) -> option<ebml::doc>;
}
impl decoder of doc_decoder_helpers for ebml::doc {
fn as_int() -> int { ebml::doc_as_u64(self) as int }
fn [](tag: c::astencode_tag) -> ebml::doc {
ebml::get_doc(self, tag as uint)
@ -734,7 +787,17 @@ impl decoder for ebml::doc {
}
}
impl decoder for ebml::ebml_deserializer {
trait ebml_deserializer_decoder_helpers {
fn read_ty(xcx: extended_decode_ctxt) -> ty::t;
fn read_tys(xcx: extended_decode_ctxt) -> ~[ty::t];
fn read_bounds(xcx: extended_decode_ctxt) -> @~[ty::param_bound];
fn read_ty_param_bounds_and_ty(xcx: extended_decode_ctxt)
-> ty::ty_param_bounds_and_ty;
}
impl decoder of ebml_deserializer_decoder_helpers
for ebml::ebml_deserializer {
fn read_ty(xcx: extended_decode_ctxt) -> ty::t {
// Note: regions types embed local node ids. In principle, we
// should translate these node ids into the new decode

View file

@ -169,17 +169,18 @@ fn check_crate(tcx: ty::ctxt,
method_map: typeck::method_map,
last_use_map: liveness::last_use_map,
crate: @ast::crate) -> (root_map, mutbl_map) {
let bccx = @{tcx: tcx,
method_map: method_map,
last_use_map: last_use_map,
binding_map: int_hash(),
root_map: root_map(),
mutbl_map: int_hash(),
mut loaned_paths_same: 0,
mut loaned_paths_imm: 0,
mut stable_paths: 0,
mut req_pure_paths: 0,
mut guaranteed_paths: 0};
let bccx = borrowck_ctxt_(@{tcx: tcx,
method_map: method_map,
last_use_map: last_use_map,
binding_map: int_hash(),
root_map: root_map(),
mutbl_map: int_hash(),
mut loaned_paths_same: 0,
mut loaned_paths_imm: 0,
mut stable_paths: 0,
mut req_pure_paths: 0,
mut guaranteed_paths: 0});
let req_maps = gather_loans::gather_loans(bccx, crate);
check_loans::check_loans(bccx, req_maps, crate);
@ -210,7 +211,7 @@ fn check_crate(tcx: ty::ctxt,
// ----------------------------------------------------------------------
// Type definitions
type borrowck_ctxt = @{tcx: ty::ctxt,
type borrowck_ctxt_ = {tcx: ty::ctxt,
method_map: typeck::method_map,
last_use_map: liveness::last_use_map,
binding_map: binding_map,
@ -224,6 +225,10 @@ type borrowck_ctxt = @{tcx: ty::ctxt,
mut req_pure_paths: uint,
mut guaranteed_paths: uint};
// Nominal wrapper around @borrowck_ctxt_ (the record type above); constructed
// via the unary variant in check_crate.
enum borrowck_ctxt {
borrowck_ctxt_(@borrowck_ctxt_)
}
// a map mapping id's of expressions of gc'd type (@T, @[], etc) where
// the box needs to be kept live to the id of the scope for which they
// must stay live.
@ -365,7 +370,11 @@ impl of ast_node for @ast::pat {
fn span() -> span { self.span }
}
impl methods for ty::ctxt {
trait get_type_for_node {
fn ty<N: ast_node>(node: N) -> ty::t;
}
impl methods of get_type_for_node for ty::ctxt {
fn ty<N: ast_node>(node: N) -> ty::t {
ty::node_id_to_type(self, node.id())
}

View file

@ -6,17 +6,21 @@ export public_methods;
impl public_methods for borrowck_ctxt {
fn loan(cmt: cmt, mutbl: ast::mutability) -> @dvec<loan> {
let lc = @{bccx: self, loans: @dvec()};
let lc = loan_ctxt_(@{bccx: self, loans: @dvec()});
lc.loan(cmt, mutbl);
ret lc.loans;
}
}
type loan_ctxt = @{
type loan_ctxt_ = {
bccx: borrowck_ctxt,
loans: @dvec<loan>
};
// Nominal wrapper around @loan_ctxt_ (bccx + accumulated loans); built via
// the unary variant in public_methods::loan above.
enum loan_ctxt {
loan_ctxt_(@loan_ctxt_)
}
impl loan_methods for loan_ctxt {
fn ok_with_loan_of(cmt: cmt,
mutbl: ast::mutability) {

View file

@ -162,14 +162,17 @@ fn get_warning_settings_level(settings: warning_settings,
// This is kind of unfortunate. It should be somewhere else, or we should use
// a persistent data structure...
fn clone_lint_modes(modes: lint_modes) -> lint_modes {
@{v: copy modes.v}
std::smallintmap::smallintmap_(@{v: copy modes.v})
}
type ctxt = {dict: lint_dict,
curr: lint_modes,
is_default: bool,
sess: session};
type ctxt_ = {dict: lint_dict,
curr: lint_modes,
is_default: bool,
sess: session};
// Nominal wrapper around the ctxt_ record (by value, no @-box here);
// constructed and functionally-updated (`with *cx`) in build_settings_crate.
enum ctxt {
ctxt_(ctxt_)
}
impl methods for ctxt {
fn get_level(lint: lint) -> level {
@ -216,9 +219,10 @@ impl methods for ctxt {
// we do multiple unneeded copies of the map
// if many attributes are set, but this shouldn't
// actually be a problem...
new_ctxt = {is_default: false,
curr: clone_lint_modes(new_ctxt.curr)
with new_ctxt};
new_ctxt =
ctxt_({is_default: false,
curr: clone_lint_modes(new_ctxt.curr)
with *new_ctxt});
new_ctxt.set_level(lint, new_level);
}
}
@ -271,10 +275,10 @@ fn build_settings_item(i: @ast::item, &&cx: ctxt, v: visit::vt<ctxt>) {
fn build_settings_crate(sess: session::session, crate: @ast::crate) {
let cx = {dict: get_lint_dict(),
curr: std::smallintmap::mk(),
is_default: true,
sess: sess};
let cx = ctxt_({dict: get_lint_dict(),
curr: std::smallintmap::mk(),
is_default: true,
sess: sess});
// Install defaults.
for cx.dict.each |_k, spec| { cx.set_level(spec.lint, spec.default); }
@ -291,7 +295,7 @@ fn build_settings_crate(sess: session::session, crate: @ast::crate) {
sess.warning_settings.default_settings.insert(k, v);
}
let cx = {is_default: true with cx};
let cx = ctxt_({is_default: true with *cx});
let visit = visit::mk_vt(@{
visit_item: build_settings_item

View file

@ -426,7 +426,7 @@ fn resolve_crate(sess: session, def_map: resolve::def_map, crate: @ast::crate)
type region_paramd_items = hashmap<ast::node_id, ()>;
type dep_map = hashmap<ast::node_id, @dvec<ast::node_id>>;
type determine_rp_ctxt = @{
type determine_rp_ctxt_ = {
sess: session,
ast_map: ast_map::map,
def_map: resolve::def_map,
@ -442,6 +442,10 @@ type determine_rp_ctxt = @{
mut anon_implies_rp: bool
};
// Nominal wrapper around @determine_rp_ctxt_; built via the unary variant in
// determine_rp_in_crate.
enum determine_rp_ctxt {
determine_rp_ctxt_(@determine_rp_ctxt_)
}
impl methods for determine_rp_ctxt {
fn add_rp(id: ast::node_id) {
assert id != 0;
@ -608,14 +612,14 @@ fn determine_rp_in_crate(sess: session,
ast_map: ast_map::map,
def_map: resolve::def_map,
crate: @ast::crate) -> region_paramd_items {
let cx = @{sess: sess,
ast_map: ast_map,
def_map: def_map,
region_paramd_items: int_hash(),
dep_map: int_hash(),
worklist: dvec(),
mut item_id: 0,
mut anon_implies_rp: false};
let cx = determine_rp_ctxt_(@{sess: sess,
ast_map: ast_map,
def_map: def_map,
region_paramd_items: int_hash(),
dep_map: int_hash(),
worklist: dvec(),
mut item_id: 0,
mut anon_implies_rp: false});
// gather up the base set, worklist and dep_map:
let visitor = visit::mk_vt(@{

View file

@ -1,5 +1,6 @@
import driver::session::session;
import metadata::csearch::{each_path, get_impls_for_mod, lookup_defs};
import metadata::csearch::{each_path, get_impls_for_mod};
import metadata::csearch::{get_method_names_if_trait, lookup_defs};
import metadata::cstore::find_use_stmt_cnum;
import metadata::decoder::{def_like, dl_def, dl_field, dl_impl};
import middle::lint::{error, ignore, level, unused_imports, warn};
@ -59,6 +60,9 @@ type ImplScope = @~[@Impl];
type ImplScopes = @list<ImplScope>;
type ImplMap = hashmap<node_id,ImplScopes>;
// Trait method resolution
type TraitMap = @hashmap<node_id,@dvec<def_id>>;
// Export mapping
type Export = { reexp: bool, id: def_id };
type ExportMap = hashmap<node_id, ~[Export]>;
@ -599,6 +603,8 @@ class Resolver {
let unused_import_lint_level: level;
let trait_info: hashmap<def_id,@hashmap<Atom,()>>;
// The number of imports that are currently unresolved.
let mut unresolved_imports: uint;
@ -617,6 +623,9 @@ class Resolver {
// allowed to access private names of any module.
let mut xray_context: XrayFlag;
// The trait that the current context can refer to.
let mut current_trait_ref: option<def_id>;
// The atom for the keyword "self".
let self_atom: Atom;
@ -629,6 +638,7 @@ class Resolver {
let def_map: DefMap;
let impl_map: ImplMap;
let export_map: ExportMap;
let trait_map: TraitMap;
new(session: session, ast_map: ASTMap, crate: @crate) {
self.session = session;
@ -646,12 +656,16 @@ class Resolver {
self.unused_import_lint_level = unused_import_lint_level(session);
self.trait_info = new_def_hash();
self.unresolved_imports = 0u;
self.current_module = (*self.graph_root).get_module();
self.value_ribs = @dvec();
self.type_ribs = @dvec();
self.xray_context = NoXray;
self.current_trait_ref = none;
self.self_atom = (*self.atom_table).intern(@~"self");
self.primitive_type_table = @PrimitiveTypeTable(self.atom_table);
@ -661,6 +675,7 @@ class Resolver {
self.def_map = int_hash();
self.impl_map = int_hash();
self.export_map = int_hash();
self.trait_map = @int_hash();
}
/// The main name resolution procedure.
@ -930,14 +945,34 @@ class Resolver {
visit_item(item, new_parent, visitor);
}
item_trait(*) {
(*name_bindings).define_type(def_ty(local_def(item.id)));
item_trait(_, methods) {
// Add the names of all the methods to the trait info.
let method_names = @atom_hashmap();
for methods.each |method| {
let atom;
alt method {
required(required_method) {
atom = (*self.atom_table).intern
(required_method.ident);
}
provided(provided_method) {
atom = (*self.atom_table).intern
(provided_method.ident);
}
}
(*method_names).insert(atom, ());
}
let def_id = local_def(item.id);
self.trait_info.insert(def_id, method_names);
(*name_bindings).define_type(def_ty(def_id));
visit_item(item, new_parent, visitor);
}
item_mac(*) {
fail ~"item macros unimplemented"
}
item_mac(*) {
fail ~"item macros unimplemented"
}
}
}
@ -1300,6 +1335,34 @@ class Resolver {
def_ty(def_id) {
#debug("(building reduced graph for external \
crate) building type %s", final_ident);
// If this is a trait, add all the method names
// to the trait info.
alt get_method_names_if_trait(self.session.cstore,
def_id) {
none {
// Nothing to do.
}
some(method_names) {
let interned_method_names =
@atom_hashmap();
for method_names.each |method_name| {
#debug("(building reduced graph for \
external crate) ... adding \
trait method '%?'",
method_name);
let atom =
(*self.atom_table).intern
(method_name);
(*interned_method_names).insert(atom,
());
}
self.trait_info.insert
(def_id, interned_method_names);
}
}
(*child_name_bindings).define_type(def);
}
def_class(def_id) {
@ -2724,7 +2787,9 @@ class Resolver {
// Move down in the graph.
alt name {
none { /* Nothing to do. */ }
none {
// Nothing to do.
}
some(name) {
alt orig_module.children.find(name) {
none {
@ -2903,6 +2968,7 @@ class Resolver {
item_impl(type_parameters, interface_reference, self_type,
methods) {
self.resolve_implementation(item.id,
item.span,
type_parameters,
@ -2922,8 +2988,7 @@ class Resolver {
// Create a new rib for the interface-wide type parameters.
do self.with_type_parameter_rib
(HasTypeParameters(&type_parameters, item.id, 0u,
NormalRibKind))
|| {
NormalRibKind)) {
self.resolve_type_parameters(type_parameters, visitor);
@ -2939,8 +3004,7 @@ class Resolver {
(HasTypeParameters(&ty_m.tps,
item.id,
type_parameters.len(),
NormalRibKind))
|| {
NormalRibKind)) {
// Resolve the method-specific type
// parameters.
@ -3318,13 +3382,13 @@ class Resolver {
let borrowed_type_parameters: &~[ty_param] = &type_parameters;
do self.with_type_parameter_rib(HasTypeParameters
(borrowed_type_parameters, id, 0u,
NormalRibKind))
|| {
NormalRibKind)) {
// Resolve the type parameters.
self.resolve_type_parameters(type_parameters, visitor);
// Resolve the interface reference, if necessary.
let original_trait_ref = self.current_trait_ref;
alt interface_reference {
none {
// Nothing to do.
@ -3339,6 +3403,9 @@ class Resolver {
}
some(def) {
self.record_def(interface_reference.ref_id, def);
// Record the current trait reference.
self.current_trait_ref = some(def_id_of_def(def));
}
}
}
@ -3364,6 +3431,9 @@ class Resolver {
NoCaptureClause,
visitor);
}
// Restore the original trait reference.
self.current_trait_ref = original_trait_ref;
}
}
@ -3828,9 +3898,10 @@ class Resolver {
ret ImportNameDefinition(def);
}
none {
fail ~"target for namespace doesn't refer to \
bindings that contain a definition for \
that namespace!";
// This can happen with external impls, due to
// the imperfect way we read the metadata.
ret NoNameDefinition;
}
}
}
@ -4040,6 +4111,11 @@ class Resolver {
self.record_impls_for_expr_if_necessary(expr);
// Then record candidate traits for this expression if it could result
// in the invocation of a method call.
self.record_candidate_traits_for_expr_if_necessary(expr);
// Next, resolve the node.
alt expr.node {
// The interpretation of paths depends on whether the path has
@ -4101,6 +4177,109 @@ class Resolver {
}
}
// For expressions that can turn into a method invocation, record in
// self.trait_map the set of in-scope traits that declare a method with the
// relevant name; typeck's method lookup consults trait_map later.
fn record_candidate_traits_for_expr_if_necessary(expr: @expr) {
alt expr.node {
expr_field(_, ident, _) {
// `a.b` / `a.b(...)`: every visible trait declaring a method named
// `ident` is a candidate for this expression id.
let atom = (*self.atom_table).intern(ident);
let traits = self.search_for_traits_containing_method(atom);
self.trait_map.insert(expr.id, traits);
}
_ {
// Nothing to do.
//
// XXX: Handle more here... operator overloading, placement
// new, etc.
}
}
}
// Walks from the current module up through its parent-link chain and
// accumulates the def_id of every visible trait that declares a method
// named `name`. Three sources are consulted at each level: the trait
// currently being implemented (current_trait_ref), trait items that are
// direct children of the module, and trait names pulled in by imports.
fn search_for_traits_containing_method(name: Atom) -> @dvec<def_id> {
let found_traits = @dvec();
let mut search_module = self.current_module;
loop {
// Look for the current trait.
// (current_trait_ref is loop-invariant; it is re-checked on every
// iteration, which is redundant but harmless.)
alt copy self.current_trait_ref {
some(trait_def_id) {
self.add_trait_info_if_containing_method(found_traits,
trait_def_id,
name);
}
none {
// Nothing to do.
}
}
// Look for trait children.
for search_module.children.each |_name, child_name_bindings| {
alt child_name_bindings.def_for_namespace(TypeNS) {
some(def_ty(trait_def_id)) {
self.add_trait_info_if_containing_method(found_traits,
trait_def_id,
name);
}
some(_) | none {
// Continue.
}
}
}
// Look for imports.
for search_module.import_resolutions.each
|_atom, import_resolution| {
alt import_resolution.target_for_namespace(TypeNS) {
none {
// Continue.
}
some(target) {
alt target.bindings.def_for_namespace(TypeNS) {
some(def_ty(trait_def_id)) {
self.add_trait_info_if_containing_method
(found_traits, trait_def_id, name);
}
some(_) | none {
// Continue.
}
}
}
}
}
// Move to the next parent.
alt search_module.parent_link {
NoParentLink {
// Done.
break;
}
ModuleParentLink(parent_module, _) |
BlockParentLink(parent_module, _) {
search_module = parent_module;
}
}
}
ret found_traits;
}
// Pushes `trait_def_id` onto `found_traits` iff self.trait_info records that
// trait as declaring a method named `name`; otherwise (unknown trait, or
// trait without such a method) does nothing.
fn add_trait_info_if_containing_method(found_traits: @dvec<def_id>,
trait_def_id: def_id,
name: Atom) {
alt self.trait_info.find(trait_def_id) {
some(trait_info) if trait_info.contains_key(name) {
#debug("(adding trait info if containing method) found trait \
%d:%d for method '%s'",
trait_def_id.crate,
trait_def_id.node,
*(*self.atom_table).atom_to_str(name));
(*found_traits).push(trait_def_id);
}
some(_) | none {
// Continue.
}
}
}
fn record_def(node_id: node_id, def: def) {
#debug("(recording def) recording %? for %?", def, node_id);
self.def_map.insert(node_id, def);
@ -4310,14 +4489,18 @@ class Resolver {
/// Entry point to crate resolution.
fn resolve_crate(session: session, ast_map: ASTMap, crate: @crate)
-> { def_map: DefMap, exp_map: ExportMap, impl_map: ImplMap } {
-> { def_map: DefMap,
exp_map: ExportMap,
impl_map: ImplMap,
trait_map: TraitMap } {
let resolver = @Resolver(session, ast_map, crate);
(*resolver).resolve(resolver);
ret {
def_map: resolver.def_map,
exp_map: resolver.export_map,
impl_map: resolver.impl_map
impl_map: resolver.impl_map,
trait_map: resolver.trait_map
};
}

View file

@ -94,7 +94,11 @@ class icx_popper {
}
}
impl ccx_icx for @crate_ctxt {
trait get_insn_ctxt {
fn insn_ctxt(s: ~str) -> icx_popper;
}
impl ccx_icx of get_insn_ctxt for @crate_ctxt {
fn insn_ctxt(s: ~str) -> icx_popper {
#debug("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() {
@ -104,13 +108,13 @@ impl ccx_icx for @crate_ctxt {
}
}
impl bcx_icx for block {
impl bcx_icx of get_insn_ctxt for block {
fn insn_ctxt(s: ~str) -> icx_popper {
self.ccx().insn_ctxt(s)
}
}
impl fcx_icx for fn_ctxt {
impl fcx_icx of get_insn_ctxt for fn_ctxt {
fn insn_ctxt(s: ~str) -> icx_popper {
self.ccx.insn_ctxt(s)
}

View file

@ -348,19 +348,23 @@ type scope_info = {
mut landing_pad: option<BasicBlockRef>,
};
impl node_info for @ast::expr {
trait get_node_info {
fn info() -> option<node_info>;
}
impl node_info of get_node_info for @ast::expr {
fn info() -> option<node_info> {
some({id: self.id, span: self.span})
}
}
impl node_info for ast::blk {
impl node_info of get_node_info for ast::blk {
fn info() -> option<node_info> {
some({id: self.node.id, span: self.span})
}
}
impl node_info for option<@ast::expr> {
impl node_info of get_node_info for option<@ast::expr> {
fn info() -> option<node_info> {
self.chain(|s| s.info())
}

View file

@ -165,6 +165,7 @@ export terr_sorts, terr_vec, terr_str, terr_record_size, terr_tuple_size;
export terr_regions_differ, terr_mutability, terr_purity_mismatch;
export terr_proto_mismatch;
export terr_ret_style_mismatch;
export purity_to_str;
// Data types
@ -441,7 +442,11 @@ impl of vid for region_vid {
fn to_str() -> ~str { #fmt["<R%u>", self.to_uint()] }
}
impl of to_str::to_str for purity {
trait purity_to_str {
fn to_str() -> ~str;
}
impl of purity_to_str for purity {
fn to_str() -> ~str {
purity_to_str(self)
}
@ -2359,7 +2364,8 @@ fn type_err_to_str(cx: ctxt, err: type_err) -> ~str {
~" function was expected";
}
terr_purity_mismatch(f1, f2) {
ret #fmt["expected %s fn but found %s fn", f1.to_str(), f2.to_str()];
ret #fmt["expected %s fn but found %s fn",
purity_to_str(f1), purity_to_str(f2)];
}
terr_proto_mismatch(e, a) {
ret #fmt["closure protocol mismatch (%s vs %s)",

View file

@ -154,10 +154,16 @@ type ty_param_substs_and_ty = {substs: ty::substs, ty: ty::t};
type ty_table = hashmap<ast::def_id, ty::t>;
type crate_ctxt = {impl_map: resolve::impl_map,
method_map: method_map,
vtable_map: vtable_map,
tcx: ty::ctxt};
type crate_ctxt_ = {impl_map: resolve::impl_map,
trait_map: resolve3::TraitMap,
method_map: method_map,
vtable_map: vtable_map,
coherence_info: @coherence::CoherenceInfo,
tcx: ty::ctxt};
// Nominal wrapper around the crate_ctxt_ record (by value; callers box the
// whole enum as @crate_ctxt); constructed via the unary variant in
// check_crate.
enum crate_ctxt {
crate_ctxt_(crate_ctxt_)
}
// Functions that write types into the node type table
fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
@ -284,17 +290,20 @@ fn check_for_main_fn(ccx: @crate_ctxt) {
}
}
fn check_crate(tcx: ty::ctxt, impl_map: resolve::impl_map,
crate: @ast::crate) -> (method_map, vtable_map) {
let ccx = @{impl_map: impl_map,
method_map: std::map::int_hash(),
vtable_map: std::map::int_hash(),
tcx: tcx};
collect::collect_item_types(ccx, crate);
fn check_crate(tcx: ty::ctxt,
impl_map: resolve::impl_map,
trait_map: resolve3::TraitMap,
crate: @ast::crate)
-> (method_map, vtable_map) {
if tcx.sess.coherence() {
coherence::check_coherence(ccx, crate);
}
let ccx = @crate_ctxt_({impl_map: impl_map,
trait_map: trait_map,
method_map: std::map::int_hash(),
vtable_map: std::map::int_hash(),
coherence_info: @coherence::CoherenceInfo(),
tcx: tcx});
collect::collect_item_types(ccx, crate);
coherence::check_coherence(ccx, crate);
check::check_item_types(ccx, crate);
check_for_main_fn(ccx);

View file

@ -76,7 +76,7 @@ import syntax::ast::ty_i;
import typeck::infer::{unify_methods}; // infcx.set()
import typeck::infer::{resolve_type, force_tvar};
type fn_ctxt =
type fn_ctxt_ =
// var_bindings, locals and next_var_id are shared
// with any nested functions that capture the environment
// (and with any functions whose environment is being captured).
@ -111,30 +111,39 @@ type fn_ctxt =
ccx: @crate_ctxt};
// Nominal wrapper around the fn_ctxt_ record (by value; callers box it as
// @fn_ctxt); constructed via the unary variant in blank_fn_ctxt / check_fn.
enum fn_ctxt {
fn_ctxt_(fn_ctxt_)
}
// Used by check_const and check_enum_variants
fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t,
region_bnd: ast::node_id) -> @fn_ctxt {
// It's kind of a kludge to manufacture a fake function context
// and statement context, but we might as well do write the code only once
@{self_ty: none,
ret_ty: rty,
indirect_ret_ty: none,
purity: ast::pure_fn,
infcx: infer::new_infer_ctxt(ccx.tcx),
locals: int_hash(),
mut region_lb: region_bnd,
mut region_ub: region_bnd,
in_scope_regions: @nil,
node_types: smallintmap::mk(),
node_type_substs: map::int_hash(),
ccx: ccx}
@fn_ctxt_({self_ty: none,
ret_ty: rty,
indirect_ret_ty: none,
purity: ast::pure_fn,
infcx: infer::new_infer_ctxt(ccx.tcx),
locals: int_hash(),
mut region_lb: region_bnd,
mut region_ub: region_bnd,
in_scope_regions: @nil,
node_types: smallintmap::mk(),
node_type_substs: map::int_hash(),
ccx: ccx})
}
// a list of mapping from in-scope-region-names ("isr") to the
// corresponding ty::region
type isr_alist = @list<(ty::bound_region, ty::region)>;
impl methods for isr_alist {
trait get_and_find_region {
fn get(br: ty::bound_region) -> ty::region;
fn find(br: ty::bound_region) -> option<ty::region>;
}
impl methods of get_and_find_region for isr_alist {
fn get(br: ty::bound_region) -> ty::region {
option::get(self.find(br))
}
@ -227,18 +236,18 @@ fn check_fn(ccx: @crate_ctxt,
}
} else { none };
@{self_ty: self_ty,
ret_ty: ret_ty,
indirect_ret_ty: indirect_ret_ty,
purity: purity,
infcx: infcx,
locals: locals,
mut region_lb: body.node.id,
mut region_ub: body.node.id,
in_scope_regions: isr,
node_types: node_types,
node_type_substs: node_type_substs,
ccx: ccx}
@fn_ctxt_({self_ty: self_ty,
ret_ty: ret_ty,
indirect_ret_ty: indirect_ret_ty,
purity: purity,
infcx: infcx,
locals: locals,
mut region_lb: body.node.id,
mut region_ub: body.node.id,
in_scope_regions: isr,
node_types: node_types,
node_type_substs: node_type_substs,
ccx: ccx})
};
gather_locals(fcx, decl, body, arg_tys);
@ -1787,8 +1796,8 @@ fn check_block_no_value(fcx: @fn_ctxt, blk: ast::blk) -> bool {
fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool {
let fcx = alt blk.node.rules {
ast::unchecked_blk { @{purity: ast::impure_fn with *fcx0} }
ast::unsafe_blk { @{purity: ast::unsafe_fn with *fcx0} }
ast::unchecked_blk { @fn_ctxt_({purity: ast::impure_fn with **fcx0}) }
ast::unsafe_blk { @fn_ctxt_({purity: ast::unsafe_fn with **fcx0}) }
ast::default_blk { fcx0 }
};
do fcx.with_region_lb(blk.node.id) {

View file

@ -1,9 +1,12 @@
/* Code to handle method lookups (which can be quite complex) */
import coherence::get_base_type_def_id;
import middle::resolve3::Impl;
import middle::typeck::infer::methods; // next_ty_vars
import syntax::ast::def_id;
import syntax::ast_map;
import syntax::ast_map::node_id_to_str;
import syntax::ast_util::new_def_hash;
import middle::typeck::infer::methods; // next_ty_vars
import dvec::{dvec, extensions};
type candidate = {
@ -55,11 +58,34 @@ class lookup {
// Entrypoint:
fn method() -> option<method_map_entry> {
#debug["method lookup(m_name=%s, self_ty=%s)",
*self.m_name, self.fcx.infcx.ty_to_str(self.self_ty)];
#debug["method lookup(m_name=%s, self_ty=%s, %?)",
*self.m_name, self.fcx.infcx.ty_to_str(self.self_ty),
ty::get(self.self_ty).struct];
// Determine if there are any inherent methods we can call.
let optional_inherent_methods;
alt get_base_type_def_id(self.fcx.infcx,
self.self_expr.span,
self.self_ty) {
none {
optional_inherent_methods = none;
}
some(base_type_def_id) {
#debug("(checking method) found base type");
optional_inherent_methods =
self.fcx.ccx.coherence_info.inherent_methods.find
(base_type_def_id);
if optional_inherent_methods.is_none() {
#debug("(checking method) ... no inherent methods found");
} else {
#debug("(checking method) ... inherent methods found");
}
}
}
loop {
// First, see whether this is an interface-bounded parameter
// First, see whether this is an interface-bounded parameter.
alt ty::get(self.self_ty).struct {
ty::ty_param(n, did) {
self.add_candidates_from_param(n, did);
@ -83,12 +109,20 @@ class lookup {
// would require doing an implicit borrow of the lhs.
self.add_candidates_from_scope(false);
// Look for inherent methods.
self.add_inherent_and_extension_candidates
(optional_inherent_methods, false);
// if we found anything, stop before trying borrows
if self.candidates.len() > 0u { break; }
// now look for impls in scope that might require a borrow
self.add_candidates_from_scope(true);
// Again, look for inherent methods.
self.add_inherent_and_extension_candidates
(optional_inherent_methods, true);
// if we found anything, stop before attempting auto-deref.
if self.candidates.len() > 0u { break; }
@ -296,6 +330,14 @@ class lookup {
}
fn add_candidates_from_scope(use_assignability: bool) {
// If we're using coherence and this is one of the method invocation
// forms it supports, don't use this method; it'll result in lots of
// multiple-methods-in-scope errors.
if self.fcx.ccx.trait_map.contains_key(self.expr.id) {
ret;
}
let impls_vecs = self.fcx.ccx.impl_map.get(self.expr.id);
let mut added_any = false;
@ -303,43 +345,8 @@ class lookup {
for list::each(impls_vecs) |impls| {
for vec::each(*impls) |im| {
// Check whether this impl has a method with the right name.
for im.methods.find(|m| m.ident == self.m_name).each |m| {
// determine the `self` of the impl with fresh
// variables for each parameter:
let {substs: impl_substs, ty: impl_ty} =
impl_self_ty(self.fcx, im.did);
// Depending on our argument, we find potential
// matches either by checking subtypability or
// type assignability. Collect the matches.
let matches = if use_assignability {
self.fcx.can_mk_assignty(
self.self_expr, self.borrow_lb,
self.self_ty, impl_ty)
} else {
self.fcx.can_mk_subty(self.self_ty, impl_ty)
};
#debug["matches = %?", matches];
alt matches {
result::err(_) { /* keep looking */ }
result::ok(_) {
if !self.candidate_impls.contains_key(im.did) {
let fty = self.ty_from_did(m.did);
self.candidates.push(
{self_ty: self.self_ty,
self_substs: impl_substs,
rcvr_ty: impl_ty,
n_tps_m: m.n_tps,
fty: fty,
entry: {derefs: self.derefs,
origin: method_static(m.did)}});
self.candidate_impls.insert(im.did, ());
added_any = true;
}
}
}
if self.add_candidates_from_impl(im, use_assignability) {
added_any = true;
}
}
@ -349,6 +356,53 @@ class lookup {
}
}
// Returns true if any candidates were added and false otherwise.
//
// Adds a method-lookup candidate from the impl `im` when (a) `im` has a
// method whose name matches the one being looked up and (b) the
// receiver type is compatible with the impl's self type. The
// `use_assignability` flag selects the weaker assignability check
// (which may involve an implicit borrow) instead of plain subtyping.
fn add_candidates_from_impl(im: @resolve3::Impl,
use_assignability: bool) -> bool {
let mut added_any = false;
// Check whether this impl has a method with the right name.
for im.methods.find(|m| m.ident == self.m_name).each |m| {
// determine the `self` of the impl with fresh
// variables for each parameter:
let {substs: impl_substs, ty: impl_ty} =
impl_self_ty(self.fcx, im.did);
// Depending on our argument, we find potential
// matches either by checking subtypability or
// type assignability. Collect the matches.
let matches = if use_assignability {
self.fcx.can_mk_assignty(self.self_expr, self.borrow_lb,
self.self_ty, impl_ty)
} else {
self.fcx.can_mk_subty(self.self_ty, impl_ty)
};
#debug["matches = %?", matches];
alt matches {
result::err(_) { /* keep looking */ }
result::ok(_) {
// Dedupe by impl def-id: the same impl can be reached
// through more than one path (e.g. both in-scope and via
// the coherence tables), and we only want one candidate.
if !self.candidate_impls.contains_key(im.did) {
let fty = self.ty_from_did(m.did);
self.candidates.push(
{self_ty: self.self_ty,
self_substs: impl_substs,
rcvr_ty: impl_ty,
n_tps_m: m.n_tps,
fty: fty,
entry: {derefs: self.derefs,
origin: method_static(m.did)}});
self.candidate_impls.insert(im.did, ());
added_any = true;
}
}
}
}
ret added_any;
}
fn add_candidates_from_m(self_substs: ty::substs,
m: ty::method,
origin: method_origin) {
@ -367,6 +421,58 @@ class lookup {
entry: {derefs: self.derefs, origin: origin}});
}
// Adds candidates from the two coherence-derived sources:
//   1. `optional_inherent_methods`: the impls providing inherent
//      methods for the receiver's base type (already looked up by the
//      caller), and
//   2. extension methods of every trait the trait map recorded for
//      this method-call expression.
// `use_assignability` is forwarded to add_candidates_from_impl to pick
// assignability vs. subtyping when matching the receiver type.
fn add_inherent_and_extension_candidates(optional_inherent_methods:
option<@dvec<@Impl>>,
use_assignability: bool) {
// Add inherent methods.
alt optional_inherent_methods {
none {
// Continue.
}
some(inherent_methods) {
#debug("(adding inherent and extension candidates) adding \
inherent candidates");
for inherent_methods.each |implementation| {
#debug("(adding inherent and extension candidates) \
adding candidates from impl: %s",
node_id_to_str(self.tcx().items,
implementation.did.node));
self.add_candidates_from_impl(implementation,
use_assignability);
}
}
}
// Add trait methods.
alt self.fcx.ccx.trait_map.find(self.expr.id) {
none {
// XXX: This particular operation is not yet trait-ified;
// leave it alone for now.
}
some(trait_ids) {
for (*trait_ids).each |trait_id| {
#debug("(adding inherent and extension candidates) \
trying trait: %s",
node_id_to_str(self.tcx().items, trait_id.node));
let coherence_info = self.fcx.ccx.coherence_info;
alt coherence_info.extension_methods.find(trait_id) {
none {
// No impls registered for this trait; nothing to add.
// Do nothing.
}
some(extension_methods) {
for extension_methods.each |implementation| {
self.add_candidates_from_impl
(implementation, use_assignability);
}
}
}
}
}
}
}
fn write_mty_from_candidate(cand: candidate) -> method_map_entry {
let tcx = self.fcx.ccx.tcx;

View file

@ -4,40 +4,121 @@
// has at most one implementation for each type. Then we build a mapping from
// each trait in the system to its implementations.
import middle::ty::{get, t, ty_box, ty_uniq, ty_ptr, ty_rptr, ty_enum};
import metadata::csearch::{each_path, get_impl_trait, get_impls_for_mod};
import metadata::cstore::{cstore, iter_crate_data};
import metadata::decoder::{dl_def, dl_field, dl_impl};
import middle::resolve3::Impl;
import middle::ty::{get, lookup_item_type, subst, t, ty_box};
import middle::ty::{ty_uniq, ty_ptr, ty_rptr, ty_enum};
import middle::ty::{ty_class, ty_nil, ty_bot, ty_bool, ty_int, ty_uint};
import middle::ty::{ty_float, ty_estr, ty_evec, ty_rec};
import middle::ty::{ty_fn, ty_trait, ty_tup, ty_var, ty_var_integral};
import middle::ty::{ty_param, ty_self, ty_type, ty_opaque_box};
import middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, new_ty_hash};
import middle::ty::{subst};
import middle::typeck::infer::{infer_ctxt, mk_subty, new_infer_ctxt};
import syntax::ast::{crate, def_id, item, item_class, item_const, item_enum};
import syntax::ast::{item_fn, item_foreign_mod, item_impl, item_mac};
import syntax::ast::{item_mod, item_trait, item_ty, local_crate, method};
import syntax::ast::{node_id, trait_ref};
import syntax::ast_util::{def_id_of_def, new_def_hash};
import middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_is_var};
import middle::typeck::infer::{infer_ctxt, mk_subty};
import middle::typeck::infer::{new_infer_ctxt, resolve_ivar, resolve_type};
import syntax::ast::{crate, def_id, def_mod, item, item_class, item_const};
import syntax::ast::{item_enum, item_fn, item_foreign_mod, item_impl};
import syntax::ast::{item_mac, item_mod, item_trait, item_ty, local_crate};
import syntax::ast::{method, node_id, region_param, rp_none, rp_self};
import syntax::ast::{trait_ref};
import syntax::ast_map::node_item;
import syntax::ast_util::{def_id_of_def, dummy_sp, new_def_hash};
import syntax::codemap::span;
import syntax::visit::{default_simple_visitor, default_visitor};
import syntax::visit::{mk_simple_visitor, mk_vt, visit_crate, visit_item};
import syntax::visit::{visit_mod};
import util::ppaux::ty_to_str;
import dvec::{dvec, extensions};
import result::{extensions};
import result::{extensions, ok};
import std::map::{hashmap, int_hash};
import uint::range;
import vec::{len, push};
// Resolves inference variables in `original_type` and then strips all
// layers of pointer indirection (@T, ~T, *T, &T), returning the
// underlying nominal type -- enum, trait, or class -- if there is one,
// or `none` for every other type shape. Reports a fatal error at
// `span` if the type is still an unconstrained type variable after
// resolution, since the base type cannot then be determined.
fn get_base_type(inference_context: infer_ctxt, span: span, original_type: t)
-> option<t> {
let resolved_type;
alt resolve_type(inference_context,
original_type,
resolve_ivar) {
// Only accept a resolution that actually pinned the type down.
ok(resulting_type) if !type_is_var(resulting_type) {
resolved_type = resulting_type;
}
_ {
inference_context.tcx.sess.span_fatal(span,
~"the type of this value \
must be known in order \
to determine the base \
type");
}
}
alt get(resolved_type).struct {
ty_box(base_mutability_and_type) |
ty_uniq(base_mutability_and_type) |
ty_ptr(base_mutability_and_type) |
ty_rptr(_, base_mutability_and_type) {
// Pointer-like type: recurse on the pointee.
#debug("(getting base type) recurring");
get_base_type(inference_context, span,
base_mutability_and_type.ty)
}
ty_enum(*) | ty_trait(*) | ty_class(*) {
#debug("(getting base type) found base type");
some(resolved_type)
}
// Exhaustive list of the remaining type constructors, none of
// which has a nominal base type.
ty_nil | ty_bot | ty_bool | ty_int(*) | ty_uint(*) | ty_float(*) |
ty_estr(*) | ty_evec(*) | ty_rec(*) |
ty_fn(*) | ty_tup(*) | ty_var(*) | ty_var_integral(*) |
ty_param(*) | ty_self | ty_type | ty_opaque_box |
ty_opaque_closure_ptr(*) | ty_unboxed_vec(*) {
#debug("(getting base type) no base type; found %?",
get(original_type).struct);
none
}
}
}
// Returns the def ID of the base type, if there is one.
//
// Thin wrapper around get_base_type: extracts the def_id from the
// enum/class/trait it returns. The `fail` arm is an internal
// invariant check -- get_base_type only ever yields those three
// nominal shapes.
fn get_base_type_def_id(inference_context: infer_ctxt,
span: span,
original_type: t)
-> option<def_id> {
alt get_base_type(inference_context, span, original_type) {
none {
ret none;
}
some(base_type) {
alt get(base_type).struct {
ty_enum(def_id, _) |
ty_class(def_id, _) |
ty_trait(def_id, _) {
ret some(def_id);
}
_ {
fail ~"get_base_type() returned a type that wasn't an \
enum, class, or trait";
}
}
}
}
}
class CoherenceInfo {
// Contains implementations of methods that are inherent to a type.
// Methods in these implementations don't need to be exported.
let inherent_methods: hashmap<t,@dvec<@item>>;
let inherent_methods: hashmap<def_id,@dvec<@Impl>>;
// Contains implementations of methods associated with a trait. For these,
// the associated trait must be imported at the call site.
let extension_methods: hashmap<def_id,@dvec<@item>>;
let extension_methods: hashmap<def_id,@dvec<@Impl>>;
new() {
self.inherent_methods = new_ty_hash();
self.inherent_methods = new_def_hash();
self.extension_methods = new_def_hash();
}
}
@ -45,11 +126,10 @@ class CoherenceInfo {
class CoherenceChecker {
let crate_context: @crate_ctxt;
let inference_context: infer_ctxt;
let info: @CoherenceInfo;
// A mapping from implementations to the corresponding base type
// definition ID.
let base_type_def_ids: hashmap<node_id,def_id>;
let base_type_def_ids: hashmap<def_id,def_id>;
// A set of implementations in privileged scopes; i.e. those
// implementations that are defined in the same scope as their base types.
@ -62,9 +142,8 @@ class CoherenceChecker {
new(crate_context: @crate_ctxt) {
self.crate_context = crate_context;
self.inference_context = new_infer_ctxt(crate_context.tcx);
self.info = @CoherenceInfo();
self.base_type_def_ids = int_hash();
self.base_type_def_ids = new_def_hash();
self.privileged_implementations = int_hash();
self.privileged_types = new_def_hash();
}
@ -88,12 +167,20 @@ class CoherenceChecker {
}));
// Check trait coherence.
for self.info.extension_methods.each |def_id, items| {
for self.crate_context.coherence_info.extension_methods.each
|def_id, items| {
self.check_implementation_coherence(def_id, items);
}
// Check whether traits with base types are in privileged scopes.
self.check_privileged_scopes(crate);
// Bring in external crates. It's fine for this to happen after the
// coherence checks, because we ensure by construction that no errors
// can happen at link time.
self.add_external_crates();
}
fn check_implementation(item: @item,
@ -102,111 +189,89 @@ class CoherenceChecker {
let self_type = self.crate_context.tcx.tcache.get(local_def(item.id));
alt optional_associated_trait {
none {
alt self.get_base_type(self_type.ty) {
alt get_base_type_def_id(self.inference_context,
item.span,
self_type.ty) {
none {
let session = self.crate_context.tcx.sess;
session.span_warn(item.span,
~"no base type found for inherent \
implementation; implement a trait \
instead");
session.span_err(item.span,
~"no base type found for inherent \
implementation; implement a \
trait instead");
}
some(base_type) {
let implementation_list;
alt self.info.inherent_methods.find(base_type) {
none {
implementation_list = @dvec();
}
some(existing_implementation_list) {
implementation_list =
existing_implementation_list;
}
}
implementation_list.push(item);
some(_) {
// Nothing to do.
}
}
}
some(associated_trait) {
let def =
self.crate_context.tcx.def_map.get(associated_trait.ref_id);
let def_id = def_id_of_def(def);
let implementation_list;
alt self.info.extension_methods.find(def_id) {
none {
implementation_list = @dvec();
}
some(existing_implementation_list) {
implementation_list = existing_implementation_list;
}
}
implementation_list.push(item);
let def = self.crate_context.tcx.def_map.get
(associated_trait.ref_id);
let implementation = self.create_impl_from_item(item);
self.add_trait_method(def_id_of_def(def), implementation);
}
}
// Add the implementation to the mapping from implementation to base
// type def ID, if there is a base type for this implementation.
alt self.get_base_type_def_id(self_type.ty) {
alt get_base_type_def_id(self.inference_context,
item.span,
self_type.ty) {
none {
// Nothing to do.
}
some(base_type_def_id) {
self.base_type_def_ids.insert(item.id, base_type_def_id);
let implementation = self.create_impl_from_item(item);
self.add_inherent_method(base_type_def_id, implementation);
self.base_type_def_ids.insert(local_def(item.id),
base_type_def_id);
}
}
}
fn get_base_type(original_type: t) -> option<t> {
alt get(original_type).struct {
ty_box(base_mutability_and_type) |
ty_uniq(base_mutability_and_type) |
ty_ptr(base_mutability_and_type) |
ty_rptr(_, base_mutability_and_type) {
self.get_base_type(base_mutability_and_type.ty)
}
fn add_inherent_method(base_def_id: def_id, implementation: @Impl) {
let implementation_list;
alt self.crate_context.coherence_info.inherent_methods
.find(base_def_id) {
ty_enum(*) | ty_trait(*) | ty_class(*) {
some(original_type)
}
ty_nil | ty_bot | ty_bool | ty_int(*) | ty_uint(*) | ty_float(*) |
ty_estr(*) | ty_evec(*) | ty_rec(*) |
ty_fn(*) | ty_tup(*) | ty_var(*) | ty_var_integral(*) |
ty_param(*) | ty_self | ty_type | ty_opaque_box |
ty_opaque_closure_ptr(*) | ty_unboxed_vec(*) {
none
}
}
}
// Returns the def ID of the base type.
fn get_base_type_def_id(original_type: t) -> option<def_id> {
alt self.get_base_type(original_type) {
none {
ret none;
implementation_list = @dvec();
self.crate_context.coherence_info.inherent_methods
.insert(base_def_id, implementation_list);
}
some(base_type) {
alt get(base_type).struct {
ty_enum(def_id, _) |
ty_class(def_id, _) |
ty_trait(def_id, _) {
ret some(def_id);
}
_ {
fail ~"get_base_type() returned a type that \
wasn't an enum, class, or trait";
}
}
some(existing_implementation_list) {
implementation_list = existing_implementation_list;
}
}
implementation_list.push(implementation);
}
// Registers `implementation` as an impl of the trait `trait_id` in the
// crate-wide extension_methods table, creating the per-trait
// implementation list on first use.
fn add_trait_method(trait_id: def_id, implementation: @Impl) {
let implementation_list;
alt self.crate_context.coherence_info.extension_methods
.find(trait_id) {
none {
// First impl seen for this trait: create and insert its list.
implementation_list = @dvec();
self.crate_context.coherence_info.extension_methods
.insert(trait_id, implementation_list);
}
some(existing_implementation_list) {
implementation_list = existing_implementation_list;
}
}
implementation_list.push(implementation);
}
fn check_implementation_coherence(_trait_def_id: def_id,
implementations: @dvec<@item>) {
implementations: @dvec<@Impl>) {
// Unify pairs of polytypes.
for implementations.eachi |i, implementation_a| {
for range(0, implementations.len()) |i| {
let implementation_a = implementations.get_elt(i);
let polytype_a =
self.get_self_type_for_implementation(implementation_a);
for range(i + 1, implementations.len()) |j| {
@ -216,12 +281,12 @@ class CoherenceChecker {
if self.polytypes_unify(polytype_a, polytype_b) {
let session = self.crate_context.tcx.sess;
session.span_err(implementation_b.span,
session.span_err(self.span_of_impl(implementation_b),
~"conflicting implementations for a \
trait");
session.span_note(
implementation_a.span,
~"note conflicting implementation here");
trait");
session.span_note(self.span_of_impl(implementation_a),
~"note conflicting implementation \
here");
}
}
}
@ -241,7 +306,7 @@ class CoherenceChecker {
// type variables.
fn universally_quantify_polytype(polytype: ty_param_bounds_and_ty) -> t {
let self_region =
if polytype.rp {none}
if !polytype.rp {none}
else {some(self.inference_context.next_region_var_nb())};
let bounds_count = polytype.bounds.len();
@ -257,25 +322,22 @@ class CoherenceChecker {
ret subst(self.crate_context.tcx, substitutions, polytype.ty);
}
fn get_self_type_for_implementation(implementation: @item)
fn get_self_type_for_implementation(implementation: @Impl)
-> ty_param_bounds_and_ty {
alt implementation.node {
item_impl(*) {
let def = local_def(implementation.id);
ret self.crate_context.tcx.tcache.get(def);
}
_ {
self.crate_context.tcx.sess.span_bug(
implementation.span,
~"not an implementation");
}
}
ret self.crate_context.tcx.tcache.get(implementation.did);
}
// Privileged scope checking
fn check_privileged_scopes(crate: @crate) {
// Gather up all privileged types.
let privileged_types =
self.gather_privileged_types(crate.node.module.items);
for privileged_types.each |privileged_type| {
self.privileged_types.insert(privileged_type, ());
}
visit_crate(*crate, (), mk_vt(@{
visit_item: |item, _context, visitor| {
alt item.node {
@ -301,7 +363,7 @@ class CoherenceChecker {
}
}
item_impl(_, optional_trait_ref, _, _) {
alt self.base_type_def_ids.find(item.id) {
alt self.base_type_def_ids.find(local_def(item.id)) {
none {
// Nothing to do.
}
@ -329,19 +391,18 @@ class CoherenceChecker {
let session =
self.crate_context.tcx.sess;
session.span_warn(item.span,
~"cannot \
implement \
inherent \
methods for a \
type outside \
the scope the \
type was \
defined in; \
define and \
implement a \
trait \
instead");
session.span_err(item.span,
~"cannot \
implement \
inherent \
methods for a \
type outside \
the scope the \
type was \
defined in; \
define and \
implement a \
trait instead");
}
some(trait_ref) {
// This is OK if and only if the
@ -357,13 +418,13 @@ class CoherenceChecker {
if trait_id.crate != local_crate {
let session = self
.crate_context.tcx.sess;
session.span_warn(item.span,
~"cannot \
session.span_err(item.span,
~"cannot \
provide \
an \
extension \
implement\
ation \
implementa\
tion \
for a \
trait not \
defined \
@ -405,6 +466,164 @@ class CoherenceChecker {
ret results;
}
// Converts an implementation in the AST to an Impl structure.
//
// Builds the lightweight resolve3::Impl record (impl def-id, name, and
// per-method def-id / type-parameter count / name) from an
// `item_impl` AST node. Calling this on any other item kind is a
// compiler bug and aborts with span_bug.
fn create_impl_from_item(item: @item) -> @Impl {
alt item.node {
item_impl(ty_params, _, _, ast_methods) {
let mut methods = ~[];
for ast_methods.each |ast_method| {
push(methods, @{
did: local_def(ast_method.id),
n_tps: ast_method.tps.len(),
ident: ast_method.ident
});
}
ret @{
did: local_def(item.id),
ident: item.ident,
methods: methods
};
}
_ {
self.crate_context.tcx.sess.span_bug(item.span,
~"can't convert a \
non-impl to an impl");
}
}
}
// Maps an impl back to the span of its AST item, for use in
// diagnostics. Asserts the impl is local to this crate: only local
// items appear in tcx.items, so there is no span to recover for an
// impl pulled in from external crate metadata.
fn span_of_impl(implementation: @Impl) -> span {
assert implementation.did.crate == local_crate;
alt self.crate_context.tcx.items.find(implementation.did.node) {
some(node_item(item, _)) {
ret item.span;
}
_ {
self.crate_context.tcx.sess.bug(~"span_of_impl() called on \
something that wasn't an \
impl!");
}
}
}
// External crate handling
// Loads every impl exported by the external module `module_def_id`
// (via crate metadata) and registers each one in the coherence tables:
// trait impls go into extension_methods, and impls whose self type has
// a base type are also recorded as inherent methods of that base type.
// `impls_seen` dedupes impls reachable through multiple module paths.
fn add_impls_for_module(impls_seen: hashmap<def_id,()>,
crate_store: cstore,
module_def_id: def_id) {
let implementations = get_impls_for_mod(crate_store,
module_def_id,
none);
for (*implementations).each |implementation| {
// Make sure we don't visit the same implementation
// multiple times.
alt impls_seen.find(implementation.did) {
none {
// Good. Continue.
impls_seen.insert(implementation.did, ());
}
some(_) {
// Skip this one.
again;
}
}
let self_type = lookup_item_type(self.crate_context.tcx,
implementation.did);
let optional_trait =
get_impl_trait(self.crate_context.tcx,
implementation.did);
alt optional_trait {
none {
// This is an inherent method. There should be
// no problems here, but perform a sanity check
// anyway.
alt get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
none {
// An external inherent impl without a base type
// should have been rejected when its own crate was
// compiled, so hitting this is a compiler bug.
let session = self.crate_context.tcx.sess;
session.bug(#fmt("no base type for \
external impl with no \
trait: %s (type %s)!",
*implementation.ident,
ty_to_str
(self.crate_context.tcx,
self_type.ty)));
}
some(_) {
// Nothing to do.
}
}
}
some(trait_type) {
alt get(trait_type).struct {
ty_trait(trait_id, _) {
self.add_trait_method(trait_id,
implementation);
}
_ {
self.crate_context.tcx.sess
.bug(~"trait type returned is not a \
trait");
}
}
}
}
// Add the implementation to the mapping from
// implementation to base type def ID, if there is a base
// type for this implementation.
alt get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
none {
// Nothing to do.
}
some(base_type_def_id) {
self.add_inherent_method(base_type_def_id,
implementation);
self.base_type_def_ids.insert(implementation.did,
base_type_def_id);
}
}
}
}
// Walks every linked crate's metadata and feeds each of its modules --
// the crate root ({crate, node: 0}) plus every module found in the
// crate's path table -- to add_impls_for_module, so external impls
// participate in coherence and method lookup. A shared `impls_seen`
// set prevents double-registration across crates and paths.
fn add_external_crates() {
let impls_seen = new_def_hash();
let crate_store = self.crate_context.tcx.sess.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
self.add_impls_for_module(impls_seen,
crate_store,
{ crate: crate_number, node: 0 });
for each_path(crate_store, crate_number) |path_entry| {
let module_def_id;
alt path_entry.def_like {
dl_def(def_mod(def_id)) {
module_def_id = def_id;
}
dl_def(_) | dl_impl(_) | dl_field {
// Only modules can contain impls; skip everything else.
// Skip this.
again;
}
}
self.add_impls_for_module(impls_seen,
crate_store,
module_def_id);
}
}
}
}
fn check_coherence(crate_context: @crate_ctxt, crate: @crate) {

View file

@ -437,14 +437,24 @@ fn resolve_borrowings(cx: infer_ctxt) {
}
}
impl methods for ures {
// Chains a fallible computation onto a unification result: the
// continuation `f` runs only if the receiver is `ok`.
trait then {
fn then<T:copy>(f: fn() -> result<T,ty::type_err>)
-> result<T,ty::type_err>;
}
// `then` for `ures`: delegate to result::chain, discarding the unit
// success value before invoking the continuation.
impl methods of then for ures {
fn then<T:copy>(f: fn() -> result<T,ty::type_err>)
-> result<T,ty::type_err> {
self.chain(|_i| f())
}
}
impl methods<T:copy> for cres<T> {
// Helpers for combine-results (`cres<T>`): drop the success value to
// get a bare unification result, or compare against an expected value
// producing the error built by `f` on mismatch.
trait cres_helpers<T> {
fn to_ures() -> ures;
fn compare(t: T, f: fn() -> ty::type_err) -> cres<T>;
}
impl methods<T:copy> of cres_helpers<T> for cres<T> {
fn to_ures() -> ures {
alt self {
ok(_v) { ok(()) }
@ -1097,19 +1107,22 @@ const force_rvar: uint = 0b00100000;
const force_ivar: uint = 0b01000000;
const force_all: uint = 0b01110000;
type resolve_state = @{
// Working state for type-variable resolution: the inference context,
// the `modes` bitmask (force_rvar/force_ivar/... flags defined above),
// the first fixup error encountered, and the type variables already
// visited -- presumably for cycle detection; TODO confirm against the
// resolver methods.
type resolve_state_ = {
infcx: infer_ctxt,
modes: uint,
mut err: option<fixup_err>,
mut v_seen: ~[tv_vid]
};
fn resolver(infcx: infer_ctxt, modes: uint)
-> resolve_state {
@{infcx: infcx,
modes: modes,
mut err: none,
mut v_seen: ~[]}
// Single-variant enum wrapping @resolve_state_ to give it a nominal
// type (same newtype pattern as session_ / fn_ctxt_ elsewhere in this
// change), so methods can be implemented on it under the new instance
// coherence rules.
enum resolve_state {
resolve_state_(@resolve_state_)
}
// Constructs a fresh resolve_state for `infcx` with the given mode
// flags, no error, and an empty visited-variable list.
fn resolver(infcx: infer_ctxt, modes: uint) -> resolve_state {
resolve_state_(@{infcx: infcx,
modes: modes,
mut err: none,
mut v_seen: ~[]})
}
impl methods for resolve_state {