m1!{...} -> m1!(...)

This commit is contained in:
Paul Stansifer 2012-08-22 17:24:52 -07:00
parent 226fd87199
commit 29f32b4a72
380 changed files with 5330 additions and 2706 deletions

View file

@ -343,7 +343,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
metas: provided_metas,
dep_hashes: ~[~str]) -> ~str {
fn len_and_str(s: ~str) -> ~str {
return fmt!{"%u_%s", str::len(s), s};
return fmt!("%u_%s", str::len(s), s);
}
fn len_and_str_lit(l: ast::lit) -> ~str {
@ -379,8 +379,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
fn warn_missing(sess: session, name: ~str, default: ~str) {
if !sess.building_library { return; }
sess.warn(fmt!{"missing crate link meta `%s`, using `%s` as default",
name, default});
sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default",
name, default));
}
fn crate_meta_name(sess: session, _crate: ast::crate,
@ -393,8 +393,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
let mut os =
str::split_char(path::basename(output), '.');
if (vec::len(os) < 2u) {
sess.fatal(fmt!{"output file name `%s` doesn't\
appear to have an extension", output});
sess.fatal(fmt!("output file name `%s` doesn't\
appear to have an extension", output));
}
vec::pop(os);
str::connect(os, ~".")
@ -505,7 +505,7 @@ fn mangle(sess: session, ss: path) -> ~str {
for ss.each |s| {
match s { path_name(s) | path_mod(s) => {
let sani = sanitize(sess.str_of(s));
n += fmt!{"%u%s", str::len(sani), sani};
n += fmt!("%u%s", str::len(sani), sani);
} }
}
n += ~"E"; // End name-sequence.
@ -581,12 +581,12 @@ fn link_binary(sess: session,
let output = if sess.building_library {
let long_libname =
os::dll_filename(fmt!{"%s-%s-%s",
lm.name, lm.extras_hash, lm.vers});
debug!{"link_meta.name: %s", lm.name};
debug!{"long_libname: %s", long_libname};
debug!{"out_filename: %s", out_filename};
debug!{"dirname(out_filename): %s", path::dirname(out_filename)};
os::dll_filename(fmt!("%s-%s-%s",
lm.name, lm.extras_hash, lm.vers));
debug!("link_meta.name: %s", lm.name);
debug!("long_libname: %s", long_libname);
debug!("out_filename: %s", out_filename);
debug!("dirname(out_filename): %s", path::dirname(out_filename));
path::connect(path::dirname(out_filename), long_libname)
} else { out_filename };
@ -703,14 +703,14 @@ fn link_binary(sess: session,
// extern libraries might live, based on the addl_lib_search_paths
vec::push_all(cc_args, rpath::get_rpath_flags(sess, output));
debug!{"%s link args: %s", cc_prog, str::connect(cc_args, ~" ")};
debug!("%s link args: %s", cc_prog, str::connect(cc_args, ~" "));
// We run 'cc' here
let prog = run::program_output(cc_prog, cc_args);
if 0 != prog.status {
sess.err(fmt!{"linking with `%s` failed with code %d",
cc_prog, prog.status});
sess.note(fmt!{"%s arguments: %s",
cc_prog, str::connect(cc_args, ~" ")});
sess.err(fmt!("linking with `%s` failed with code %d",
cc_prog, prog.status));
sess.note(fmt!("%s arguments: %s",
cc_prog, str::connect(cc_args, ~" ")));
sess.note(prog.err + prog.out);
sess.abort_if_errors();
}
@ -723,8 +723,8 @@ fn link_binary(sess: session,
// Remove the temporary object file if we aren't saving temps
if !sess.opts.save_temps {
if ! os::remove_file(obj_filename) {
sess.warn(fmt!{"failed to delete object file `%s`",
obj_filename});
sess.warn(fmt!("failed to delete object file `%s`",
obj_filename));
}
}
}

View file

@ -21,7 +21,7 @@ fn get_rpath_flags(sess: session::session, out_filename: ~str) -> ~[~str] {
return ~[];
}
debug!{"preparing the RPATH!"};
debug!("preparing the RPATH!");
let cwd = os::getcwd();
let sysroot = sess.filesearch.sysroot();
@ -45,20 +45,20 @@ fn get_sysroot_absolute_rt_lib(sess: session::session) -> path::Path {
}
fn rpaths_to_flags(rpaths: ~[~str]) -> ~[~str] {
vec::map(rpaths, |rpath| fmt!{"-Wl,-rpath,%s",rpath} )
vec::map(rpaths, |rpath| fmt!("-Wl,-rpath,%s",rpath) )
}
fn get_rpaths(os: session::os, cwd: path::Path, sysroot: path::Path,
output: path::Path, libs: ~[path::Path],
target_triple: ~str) -> ~[~str] {
debug!{"cwd: %s", cwd};
debug!{"sysroot: %s", sysroot};
debug!{"output: %s", output};
debug!{"libs:"};
debug!("cwd: %s", cwd);
debug!("sysroot: %s", sysroot);
debug!("output: %s", output);
debug!("libs:");
for libs.each |libpath| {
debug!{" %s", libpath};
debug!(" %s", libpath);
}
debug!{"target_triple: %s", target_triple};
debug!("target_triple: %s", target_triple);
// Use relative paths to the libraries. Binaries can be moved
// as long as they maintain the relative relationship to the
@ -73,9 +73,9 @@ fn get_rpaths(os: session::os, cwd: path::Path, sysroot: path::Path,
let fallback_rpaths = ~[get_install_prefix_rpath(cwd, target_triple)];
fn log_rpaths(desc: ~str, rpaths: ~[~str]) {
debug!{"%s rpaths:", desc};
debug!("%s rpaths:", desc);
for rpaths.each |rpath| {
debug!{" %s", rpath};
debug!(" %s", rpath);
}
}
@ -124,8 +124,8 @@ fn get_rpath_relative_to_output(os: session::os,
fn get_relative_to(abs1: path::Path, abs2: path::Path) -> path::Path {
assert path::path_is_absolute(abs1);
assert path::path_is_absolute(abs2);
debug!{"finding relative path from %s to %s",
abs1, abs2};
debug!("finding relative path from %s to %s",
abs1, abs2);
let normal1 = path::normalize(abs1);
let normal2 = path::normalize(abs2);
let split1 = path::split(normal1);
@ -171,7 +171,7 @@ fn get_absolute(cwd: path::Path, lib: path::Path) -> path::Path {
}
fn get_install_prefix_rpath(cwd: path::Path, target_triple: ~str) -> ~str {
let install_prefix = env!{"CFG_PREFIX"};
let install_prefix = env!("CFG_PREFIX");
if install_prefix == ~"" {
fail ~"rustc compiled without CFG_PREFIX environment variable";
@ -222,7 +222,7 @@ mod test {
#[test]
fn test_prefix_rpath() {
let res = get_install_prefix_rpath(~"/usr/lib", ~"triple");
let d = path::connect(env!{"CFG_PREFIX"}, ~"/lib/rustc/triple/lib");
let d = path::connect(env!("CFG_PREFIX"), ~"/lib/rustc/triple/lib");
assert str::ends_with(res, d);
}

View file

@ -116,8 +116,8 @@ fn time<T>(do_it: bool, what: ~str, thunk: fn() -> T) -> T {
let start = std::time::precise_time_s();
let rv = thunk();
let end = std::time::precise_time_s();
io::stdout().write_str(fmt!{"time: %3.3f s\t%s\n",
end - start, what});
io::stdout().write_str(fmt!("time: %3.3f s\t%s\n",
end - start, what));
return rv;
}
@ -403,7 +403,7 @@ fn host_triple() -> ~str {
// FIXME (#2400): Instead of grabbing the host triple we really should
// be grabbing (at compile time) the target triple that this rustc is
// built with and calling that (at runtime) the host triple.
let ht = env!{"CFG_HOST_TRIPLE"};
let ht = env!("CFG_HOST_TRIPLE");
return if ht != ~"" {
ht
} else {
@ -438,8 +438,8 @@ fn build_session_options(matches: getopts::matches,
let lint_name = str::replace(lint_name, ~"-", ~"_");
match lint_dict.find(lint_name) {
none => {
early_error(demitter, fmt!{"unknown %s flag: %s",
level_name, lint_name});
early_error(demitter, fmt!("unknown %s flag: %s",
level_name, lint_name));
}
some(lint) => {
vec::push(lint_opts, (lint.lint, level));
@ -458,7 +458,7 @@ fn build_session_options(matches: getopts::matches,
if name == debug_flag { this_bit = bit; break; }
}
if this_bit == 0u {
early_error(demitter, fmt!{"unknown debug flag: %s", debug_flag})
early_error(demitter, fmt!("unknown debug flag: %s", debug_flag))
}
debugging_opts |= this_bit;
}

View file

@ -21,14 +21,14 @@ import rustc::middle::lint;
fn version(argv0: ~str) {
let mut vers = ~"unknown version";
let env_vers = env!{"CFG_VERSION"};
let env_vers = env!("CFG_VERSION");
if str::len(env_vers) != 0u { vers = env_vers; }
io::println(fmt!{"%s %s", argv0, vers});
io::println(fmt!{"host: %s", host_triple()});
io::println(fmt!("%s %s", argv0, vers));
io::println(fmt!("host: %s", host_triple()));
}
fn usage(argv0: ~str) {
io::println(fmt!{"Usage: %s [options] <input>\n", argv0} +
io::println(fmt!("Usage: %s [options] <input>\n", argv0) +
~"
Options:
@ -85,14 +85,14 @@ fn describe_warnings() {
fn padded(max: uint, s: ~str) -> ~str {
str::from_bytes(vec::from_elem(max - s.len(), ' ' as u8)) + s
}
io::println(fmt!{"\nAvailable lint checks:\n"});
io::println(fmt!{" %s %7.7s %s",
padded(max_key, ~"name"), ~"default", ~"meaning"});
io::println(fmt!{" %s %7.7s %s\n",
padded(max_key, ~"----"), ~"-------", ~"-------"});
io::println(fmt!("\nAvailable lint checks:\n"));
io::println(fmt!(" %s %7.7s %s",
padded(max_key, ~"name"), ~"default", ~"meaning"));
io::println(fmt!(" %s %7.7s %s\n",
padded(max_key, ~"----"), ~"-------", ~"-------"));
for lint_dict.each |k, v| {
let k = str::replace(k, ~"_", ~"-");
io::println(fmt!{" %s %7.7s %s",
io::println(fmt!(" %s %7.7s %s",
padded(max_key, k),
match v.default {
lint::allow => ~"allow",
@ -100,16 +100,16 @@ fn describe_warnings() {
lint::deny => ~"deny",
lint::forbid => ~"forbid"
},
v.desc});
v.desc));
}
io::println(~"");
}
fn describe_debug_flags() {
io::println(fmt!{"\nAvailable debug options:\n"});
io::println(fmt!("\nAvailable debug options:\n"));
for session::debugging_opts_map().each |pair| {
let (name, desc, _) = pair;
io::println(fmt!{" -Z%-20s -- %s", name, desc});
io::println(fmt!(" -Z%-20s -- %s", name, desc));
}
}

View file

@ -7,7 +7,7 @@ export inject_intrinsic;
fn inject_intrinsic(sess: session,
crate: @ast::crate) -> @ast::crate {
let intrinsic_module = @include_str!{"intrinsic.rs"};
let intrinsic_module = @include_str!("intrinsic.rs");
let item = parse::parse_item_from_source_str(~"<intrinsic>",
intrinsic_module,

View file

@ -100,8 +100,8 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
option<@ast::item> {
vec::push(cx.path, i.ident);
debug!{"current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner)};
debug!("current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner));
if is_test_fn(i) {
match i.node {
@ -111,12 +111,12 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
~"unsafe functions cannot be used for tests");
}
_ => {
debug!{"this is a test function"};
debug!("this is a test function");
let test = {span: i.span,
path: cx.path, ignore: is_ignored(cx, i),
should_fail: should_fail(i)};
cx.testfns.push(test);
debug!{"have %u test functions", cx.testfns.len()};
debug!("have %u test functions", cx.testfns.len());
}
}
}
@ -203,8 +203,8 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
vis: ast::public,
span: dummy_sp()};
debug!{"Synthetic test module:\n%s\n",
pprust::item_to_str(@item, cx.sess.intr())};
debug!("Synthetic test module:\n%s\n",
pprust::item_to_str(@item, cx.sess.intr()));
return @item;
}
@ -280,7 +280,7 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
}
fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
debug!{"building test vector from %u tests", cx.testfns.len()};
debug!("building test vector from %u tests", cx.testfns.len());
let mut descs = ~[];
for cx.testfns.each |test| {
vec::push(descs, mk_test_desc_rec(cx, test));
@ -300,8 +300,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
let span = test.span;
let path = test.path;
debug!{"encoding %s", ast_util::path_name_i(path,
cx.sess.parse_sess.interner)};
debug!("encoding %s", ast_util::path_name_i(path,
cx.sess.parse_sess.interner));
let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i(path, cx.sess.parse_sess

View file

@ -1090,7 +1090,7 @@ fn type_to_str_inner(names: type_names, outer0: ~[TypeRef], ty: TypeRef) ->
if addrspace == 0u {
~""
} else {
fmt!{"addrspace(%u)", addrspace}
fmt!("addrspace(%u)", addrspace)
}
};
return addrstr + ~"*" +

View file

@ -45,11 +45,11 @@ type cache_entry = {
};
fn dump_crates(crate_cache: DVec<cache_entry>) {
debug!{"resolved crates:"};
debug!("resolved crates:");
for crate_cache.each |entry| {
debug!{"cnum: %?", entry.cnum};
debug!{"span: %?", entry.span};
debug!{"hash: %?", entry.hash};
debug!("cnum: %?", entry.cnum);
debug!("span: %?", entry.span);
debug!("hash: %?", entry.hash);
}
}
@ -73,7 +73,7 @@ fn warn_if_multiple_versions(e: env, diag: span_handler,
if matches.len() != 1u {
diag.handler().warn(
fmt!{"using multiple versions of crate `%s`", name});
fmt!("using multiple versions of crate `%s`", name));
for matches.each |match_| {
diag.span_note(match_.span, ~"used here");
let attrs = ~[
@ -99,7 +99,7 @@ type env = @{diag: span_handler,
fn visit_view_item(e: env, i: @ast::view_item) {
match i.node {
ast::view_item_use(ident, meta_items, id) => {
debug!{"resolving use stmt. ident: %?, meta: %?", ident, meta_items};
debug!("resolving use stmt. ident: %?, meta: %?", ident, meta_items);
let cnum = resolve_crate(e, ident, meta_items, ~"", i.span);
cstore::add_use_stmt_cnum(e.cstore, id, cnum);
}
@ -236,7 +236,7 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item],
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
debug!{"resolving deps of external crate"};
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let cnum_map = int_hash::<ast::crate_num>();
@ -244,17 +244,17 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cmetas = metas_with(dep.vers, ~"vers", ~[]);
debug!{"resolving dep crate %s ver: %s hash: %s",
*e.intr.get(dep.name), dep.vers, dep.hash};
debug!("resolving dep crate %s ver: %s hash: %s",
*e.intr.get(dep.name), dep.vers, dep.hash);
match existing_match(e, metas_with_ident(*e.intr.get(cname), cmetas),
dep.hash) {
some(local_cnum) => {
debug!{"already have it"};
debug!("already have it");
// We've already seen this crate
cnum_map.insert(extrn_cnum, local_cnum);
}
none => {
debug!{"need to load it"};
debug!("need to load it");
// This is a new one so we've got to load it
// FIXME (#2404): Need better error reporting than just a bogus
// span.

View file

@ -137,17 +137,17 @@ fn get_field_type(tcx: ty::ctxt, class_id: ast::def_id,
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, class_id.crate);
let all_items = ebml::get_doc(ebml::doc(cdata.data), tag_items);
debug!{"Looking up %?", class_id};
debug!("Looking up %?", class_id);
let class_doc = expect(tcx.diag,
decoder::maybe_find_item(class_id.node, all_items),
|| fmt!{"get_field_type: class ID %? not found",
class_id} );
debug!{"looking up %? : %?", def, class_doc};
|| fmt!("get_field_type: class ID %? not found",
class_id) );
debug!("looking up %? : %?", def, class_doc);
let the_field = expect(tcx.diag,
decoder::maybe_find_item(def.node, class_doc),
|| fmt!{"get_field_type: in class %?, field ID %? not found",
class_id, def} );
debug!{"got field data %?", the_field};
|| fmt!("get_field_type: in class %?, field ID %? not found",
class_id, def) );
debug!("got field data %?", the_field);
let ty = decoder::item_type(def, the_field, tcx, cdata);
return {bounds: @~[],
region_param: none,

View file

@ -163,14 +163,14 @@ fn get_dep_hashes(cstore: cstore) -> ~[~str] {
for p(cstore).use_crate_map.each_value |cnum| {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
debug!{"Add hash[%s]: %s", cdata.name, hash};
debug!("Add hash[%s]: %s", cdata.name, hash);
vec::push(result, {name: cdata.name, hash: hash});
};
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name}
let sorted = std::sort::merge_sort(lteq, result);
debug!{"sorted:"};
debug!("sorted:");
for sorted.each |x| {
debug!{" hash[%s]: %s", x.name, x.hash};
debug!(" hash[%s]: %s", x.name, x.hash);
}
fn mapper(ch: crate_hash) -> ~str { return ch.hash; }
return vec::map(sorted, mapper);

View file

@ -100,7 +100,7 @@ fn find_item(item_id: int, items: ebml::doc) -> ebml::doc {
fn lookup_item(item_id: int, data: @~[u8]) -> ebml::doc {
let items = ebml::get_doc(ebml::doc(data), tag_items);
match maybe_find_item(item_id, items) {
none => fail(fmt!{"lookup_item: id not found: %d", item_id}),
none => fail(fmt!("lookup_item: id not found: %d", item_id)),
some(d) => d
}
}
@ -379,8 +379,8 @@ fn get_class_method(intr: ident_interner, cdata: cmd, id: ast::node_id,
let mut found = none;
let cls_items = match maybe_find_item(id, items) {
some(it) => it,
none => fail (fmt!{"get_class_method: class id not found \
when looking up method %s", *intr.get(name)})
none => fail (fmt!("get_class_method: class id not found \
when looking up method %s", *intr.get(name)))
};
for ebml::tagged_docs(cls_items, tag_item_trait_method) |mid| {
let m_did = item_def_id(mid, cdata);
@ -390,8 +390,8 @@ fn get_class_method(intr: ident_interner, cdata: cmd, id: ast::node_id,
}
match found {
some(found) => found,
none => fail (fmt!{"get_class_method: no method named %s",
*intr.get(name)})
none => fail (fmt!("get_class_method: no method named %s",
*intr.get(name)))
}
}
@ -400,8 +400,8 @@ fn class_dtor(cdata: cmd, id: ast::node_id) -> option<ast::def_id> {
let mut found = none;
let cls_items = match maybe_find_item(id, items) {
some(it) => it,
none => fail (fmt!{"class_dtor: class id not found \
when looking up dtor for %d", id})
none => fail (fmt!("class_dtor: class id not found \
when looking up dtor for %d", id))
};
for ebml::tagged_docs(cls_items, tag_item_dtor) |doc| {
let doc1 = ebml::get_doc(doc, tag_def_id);
@ -461,7 +461,7 @@ fn each_path(intr: ident_interner, cdata: cmd, f: fn(path_entry) -> bool) {
let def_id = item_def_id(item_doc, cdata);
// Construct the def for this item.
debug!{"(each_path) yielding explicit item: %s", path};
debug!("(each_path) yielding explicit item: %s", path);
let def_like = item_to_def_like(item_doc, def_id, cdata.cnum);
// Hand the information off to the iteratee.
@ -539,7 +539,7 @@ fn maybe_get_item_ast(intr: ident_interner, cdata: cmd, tcx: ty::ctxt,
id: ast::node_id,
decode_inlined_item: decode_inlined_item
) -> csearch::found_ast {
debug!{"Looking up item: %d", id};
debug!("Looking up item: %d", id);
let item_doc = lookup_item(id, cdata.data);
let path = vec::init(item_path(intr, item_doc));
match decode_inlined_item(cdata, tcx, path, item_doc) {
@ -609,7 +609,7 @@ fn get_self_ty(item: ebml::doc) -> ast::self_ty_ {
'm' => { ast::m_mutbl }
'c' => { ast::m_const }
_ => {
fail fmt!{"unknown mutability character: `%c`", ch as char}
fail fmt!("unknown mutability character: `%c`", ch as char)
}
}
}
@ -626,7 +626,7 @@ fn get_self_ty(item: ebml::doc) -> ast::self_ty_ {
'~' => { return ast::sty_uniq(get_mutability(string[1])); }
'&' => { return ast::sty_region(get_mutability(string[1])); }
_ => {
fail fmt!{"unknown self type code: `%c`", self_ty_kind as char};
fail fmt!("unknown self type code: `%c`", self_ty_kind as char);
}
}
}
@ -658,8 +658,8 @@ fn get_impls_for_mod(intr: ident_interner, cdata: cmd,
for ebml::tagged_docs(mod_item, tag_mod_impl) |doc| {
let did = ebml::with_doc_data(doc, |d| parse_def_id(d));
let local_did = translate_def_id(cdata, did);
debug!{"(get impls for mod) getting did %? for '%?'",
local_did, name};
debug!("(get impls for mod) getting did %? for '%?'",
local_did, name);
// The impl may be defined in a different crate. Ask the caller
// to give us the metadata
let impl_cdata = get_cdata(local_did.crate);
@ -797,7 +797,7 @@ fn describe_def(items: ebml::doc, id: ast::def_id) -> ~str {
if id.crate != ast::local_crate { return ~"external"; }
let it = match maybe_find_item(id.node, items) {
some(it) => it,
none => fail (fmt!{"describe_def: item not found %?", id})
none => fail (fmt!("describe_def: item not found %?", id))
};
return item_family_to_str(item_family(it));
}
@ -877,16 +877,16 @@ fn get_attributes(md: ebml::doc) -> ~[ast::attribute] {
fn list_meta_items(intr: ident_interner,
meta_items: ebml::doc, out: io::Writer) {
for get_meta_items(meta_items).each |mi| {
out.write_str(fmt!{"%s\n", pprust::meta_item_to_str(mi, intr)});
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(mi, intr)));
}
}
fn list_crate_attributes(intr: ident_interner, md: ebml::doc, hash: ~str,
out: io::Writer) {
out.write_str(fmt!{"=Crate Attributes (%s)=\n", hash});
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
for get_attributes(md).each |attr| {
out.write_str(fmt!{"%s\n", pprust::attribute_to_str(attr, intr)});
out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr)));
}
out.write_str(~"\n\n");
@ -922,8 +922,8 @@ fn list_crate_deps(intr: ident_interner, data: @~[u8], out: io::Writer) {
for get_crate_deps(intr, data).each |dep| {
out.write_str(
fmt!{"%d %s-%s-%s\n",
dep.cnum, *intr.get(dep.name), dep.hash, dep.vers});
fmt!("%d %s-%s-%s\n",
dep.cnum, *intr.get(dep.name), dep.hash, dep.vers));
}
out.write_str(~"\n");

View file

@ -126,7 +126,7 @@ fn encode_family(ebml_w: ebml::writer, c: char) {
ebml_w.end_tag();
}
fn def_to_str(did: def_id) -> ~str { fmt!{"%d:%d", did.crate, did.node} }
fn def_to_str(did: def_id) -> ~str { fmt!("%d:%d", did.crate, did.node) }
fn encode_ty_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
params: @~[ty::param_bounds]) {
@ -178,7 +178,7 @@ fn encode_symbol(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
some(x) => x,
none => {
ecx.diag.handler().bug(
fmt!{"encode_symbol: id not found %d", id});
fmt!("encode_symbol: id not found %d", id));
}
};
ebml_w.writer.write(str::bytes(sym));
@ -265,20 +265,20 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ecx, ebml_w, name);
debug!{"(encoding info for module) encoding info for module ID %d", id};
debug!("(encoding info for module) encoding info for module ID %d", id);
// Encode info about all the module children.
for md.items.each |item| {
match item.node {
item_impl(*) | item_class(*) => {
let (ident, did) = (item.ident, item.id);
debug!{"(encoding info for module) ... encoding impl %s \
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?), exported? %?",
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, ecx.tcx
.sess.parse_sess.interner),
ast_util::is_exported(ident, md)};
ast_util::is_exported(ident, md));
ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
@ -378,8 +378,8 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
vec::push(*global_index, {val: id,
pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
debug!{"encode_info_for_class: doing %s %d",
tcx.sess.str_of(nm), id};
debug!("encode_info_for_class: doing %s %d",
tcx.sess.str_of(nm), id);
encode_visibility(ebml_w, vis);
encode_name(ecx, ebml_w, nm);
encode_path(ecx, ebml_w, path, ast_map::path_name(nm));
@ -400,8 +400,8 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
{val: m.id, pos: ebml_w.writer.tell()});
let impl_path = vec::append_one(path,
ast_map::path_name(m.ident));
debug!{"encode_info_for_class: doing %s %d",
ecx.tcx.sess.str_of(m.ident), m.id};
debug!("encode_info_for_class: doing %s %d",
ecx.tcx.sess.str_of(m.ident), m.id);
encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), id, m,
vec::append(class_tps, m.tps));
@ -423,9 +423,9 @@ fn encode_info_for_fn(ecx: @encode_ctxt, ebml_w: ebml::writer,
encode_family(ebml_w, purity_fn_family(decl.purity));
encode_type_param_bounds(ebml_w, ecx, tps);
let its_ty = node_id_to_type(ecx.tcx, id);
debug!{"fn name = %s ty = %s its node id = %d",
debug!("fn name = %s ty = %s its node id = %d",
ecx.tcx.sess.str_of(ident),
util::ppaux::ty_to_str(ecx.tcx, its_ty), id};
util::ppaux::ty_to_str(ecx.tcx, its_ty), id);
encode_type(ecx, ebml_w, its_ty);
encode_path(ecx, ebml_w, path, ast_map::path_name(ident));
match item {
@ -443,8 +443,8 @@ fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::writer,
impl_path: ast_map::path, should_inline: bool,
parent_id: node_id,
m: @method, all_tps: ~[ty_param]) {
debug!{"encode_info_for_method: %d %s %u", m.id,
ecx.tcx.sess.str_of(m.ident), all_tps.len()};
debug!("encode_info_for_method: %d %s %u", m.id,
ecx.tcx.sess.str_of(m.ident), all_tps.len());
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(m.id));
encode_family(ebml_w, purity_fn_family(m.decl.purity));
@ -668,8 +668,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
/* Encode the constructor */
for struct_def.ctor.each |ctor| {
debug!{"encoding info for ctor %s %d",
ecx.tcx.sess.str_of(item.ident), ctor.node.id};
debug!("encoding info for ctor %s %d",
ecx.tcx.sess.str_of(item.ident), ctor.node.id);
vec::push(*index, {
val: ctor.node.id,
pos: ebml_w.writer.tell()

View file

@ -62,7 +62,7 @@ fn mk_filesearch(maybe_sysroot: option<Path>,
}
let sysroot = get_sysroot(maybe_sysroot);
debug!{"using sysroot = %s", sysroot};
debug!("using sysroot = %s", sysroot);
{sysroot: sysroot,
addl_lib_search_paths: addl_lib_search_paths,
target_triple: target_triple} as filesearch
@ -71,16 +71,16 @@ fn mk_filesearch(maybe_sysroot: option<Path>,
fn search<T: copy>(filesearch: filesearch, pick: pick<T>) -> option<T> {
let mut rslt = none;
for filesearch.lib_search_paths().each |lib_search_path| {
debug!{"searching %s", lib_search_path};
debug!("searching %s", lib_search_path);
for os::list_dir_path(lib_search_path).each |path| {
debug!{"testing %s", path};
debug!("testing %s", path);
let maybe_picked = pick(path);
if option::is_some(maybe_picked) {
debug!{"picked %s", path};
debug!("picked %s", path);
rslt = maybe_picked;
break;
} else {
debug!{"rejected %s", path};
debug!("rejected %s", path);
}
}
if option::is_some(rslt) { break; }
@ -168,7 +168,7 @@ fn get_cargo_lib_path_nearest() -> result<Path, ~str> {
// The name of the directory rustc expects libraries to be located.
// On Unix should be "lib", on windows "bin"
fn libdir() -> ~str {
let libdir = env!{"CFG_LIBDIR"};
let libdir = env!("CFG_LIBDIR");
if str::is_empty(libdir) {
fail ~"rustc compiled without CFG_LIBDIR environment variable";
}

View file

@ -43,8 +43,8 @@ fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} {
some(t) => return t,
none => {
cx.diag.span_fatal(
cx.span, fmt!{"can't find crate for `%s`",
*cx.intr.get(cx.ident)});
cx.span, fmt!("can't find crate for `%s`",
*cx.intr.get(cx.ident)));
}
}
}
@ -74,27 +74,27 @@ fn find_library_crate_aux(cx: ctxt,
let mut matches = ~[];
filesearch::search(filesearch, |path| {
debug!{"inspecting file %s", path};
debug!("inspecting file %s", path);
let f: ~str = path::basename(path);
if !(str::starts_with(f, prefix) && str::ends_with(f, suffix)) {
debug!{"skipping %s, doesn't look like %s*%s", path, prefix,
suffix};
debug!("skipping %s, doesn't look like %s*%s", path, prefix,
suffix);
option::none::<()>
} else {
debug!{"%s is a candidate", path};
debug!("%s is a candidate", path);
match get_metadata_section(cx.os, path) {
option::some(cvec) => {
if !crate_matches(cvec, cx.metas, cx.hash) {
debug!{"skipping %s, metadata doesn't match", path};
debug!("skipping %s, metadata doesn't match", path);
option::none::<()>
} else {
debug!{"found %s with matching metadata", path};
debug!("found %s with matching metadata", path);
vec::push(matches, {ident: path, data: cvec});
option::none::<()>
}
}
_ => {
debug!{"could not load metadata for %s", path};
debug!("could not load metadata for %s", path);
option::none::<()>
}
}
@ -107,10 +107,10 @@ fn find_library_crate_aux(cx: ctxt,
some(matches[0])
} else {
cx.diag.span_err(
cx.span, fmt!{"multiple matching crates for `%s`", crate_name});
cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.diag.handler().note(~"candidates:");
for matches.each |match_| {
cx.diag.handler().note(fmt!{"path: %s", match_.ident});
cx.diag.handler().note(fmt!("path: %s", match_.ident));
let attrs = decoder::get_crate_attributes(match_.data);
note_linkage_attrs(cx.intr, cx.diag, attrs);
}
@ -137,8 +137,8 @@ fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> ~str {
fn note_linkage_attrs(intr: ident_interner, diag: span_handler,
attrs: ~[ast::attribute]) {
for attr::find_linkage_metas(attrs).each |mi| {
diag.handler().note(fmt!{"meta: %s",
pprust::meta_item_to_str(mi,intr)});
diag.handler().note(fmt!("meta: %s",
pprust::meta_item_to_str(mi,intr)));
}
}
@ -156,8 +156,8 @@ fn crate_matches(crate_data: @~[u8], metas: ~[@ast::meta_item],
fn metadata_matches(extern_metas: ~[@ast::meta_item],
local_metas: ~[@ast::meta_item]) -> bool {
debug!{"matching %u metadata requirements against %u items",
vec::len(local_metas), vec::len(extern_metas)};
debug!("matching %u metadata requirements against %u items",
vec::len(local_metas), vec::len(extern_metas));
for local_metas.each |needed| {
if !attr::contains(extern_metas, needed) {

View file

@ -308,16 +308,16 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
}
'B' => ty::mk_opaque_box(st.tcx),
'a' => {
debug!{"saw a class"};
debug!("saw a class");
assert (next(st) == '[');
debug!{"saw a ["};
debug!("saw a [");
let did = parse_def(st, conv);
debug!{"parsed a def_id %?", did};
debug!("parsed a def_id %?", did);
let substs = parse_substs(st, conv);
assert (next(st) == ']');
return ty::mk_class(st.tcx, did, substs);
}
c => { error!{"unexpected char in type string: %c", c}; fail;}
c => { error!("unexpected char in type string: %c", c); fail;}
}
}
@ -403,7 +403,7 @@ fn parse_def_id(buf: &[u8]) -> ast::def_id {
let len = vec::len(buf);
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!{"didn't find ':' when parsing def id"};
error!("didn't find ':' when parsing def id");
fail;
}
@ -412,13 +412,13 @@ fn parse_def_id(buf: &[u8]) -> ast::def_id {
let crate_num = match uint::parse_buf(crate_part, 10u) {
some(cn) => cn as int,
none => fail (fmt!{"internal error: parse_def_id: crate number \
expected, but found %?", crate_part})
none => fail (fmt!("internal error: parse_def_id: crate number \
expected, but found %?", crate_part))
};
let def_num = match uint::parse_buf(def_part, 10u) {
some(dn) => dn as int,
none => fail (fmt!{"internal error: parse_def_id: id expected, but \
found %?", def_part})
none => fail (fmt!("internal error: parse_def_id: id expected, but \
found %?", def_part))
};
return {crate: crate_num, node: def_num};
}

View file

@ -298,15 +298,15 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) {
ty::ty_opaque_closure_ptr(ty::ck_uniq) => w.write_str(&"C~"),
ty::ty_opaque_box => w.write_char('B'),
ty::ty_class(def, substs) => {
debug!{"~~~~ %s", ~"a["};
debug!("~~~~ %s", ~"a[");
w.write_str(&"a[");
let s = cx.ds(def);
debug!{"~~~~ %s", s};
debug!("~~~~ %s", s);
w.write_str(s);
debug!{"~~~~ %s", ~"|"};
debug!("~~~~ %s", ~"|");
w.write_char('|');
enc_substs(w, cx, substs);
debug!{"~~~~ %s", ~"]"};
debug!("~~~~ %s", ~"]");
w.write_char(']');
}
}

View file

@ -82,10 +82,10 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
path: ast_map::path,
ii: ast::inlined_item,
maps: maps) {
debug!{"> Encoding inlined item: %s::%s (%u)",
debug!("> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()};
ebml_w.writer.tell());
let id_range = ast_util::compute_id_range_for_inlined_item(ii);
do ebml_w.wr_tag(c::tag_ast as uint) {
@ -94,10 +94,10 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
encode_side_tables_for_ii(ecx, maps, ebml_w, ii);
}
debug!{"< Encoded inlined fn: %s::%s (%u)",
debug!("< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()};
ebml_w.writer.tell());
}
fn decode_inlined_item(cdata: cstore::crate_metadata,
@ -109,8 +109,8 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
match par_doc.opt_child(c::tag_ast) {
none => none,
some(ast_doc) => {
debug!{"> Decoding inlined fn: %s::?",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner)};
debug!("> Decoding inlined fn: %s::?",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner));
let ast_dsr = ebml::ebml_deserializer(ast_doc);
let from_id_range = ast_util::deserialize_id_range(ast_dsr);
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
@ -121,15 +121,15 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
let ii = renumber_ast(xcx, raw_ii);
ast_map::map_decoded_item(tcx.sess.diagnostic(),
dcx.tcx.items, path, ii);
debug!{"Fn named: %s", tcx.sess.str_of(ii.ident())};
debug!("Fn named: %s", tcx.sess.str_of(ii.ident()));
decode_side_tables(xcx, ast_doc);
debug!{"< Decoded inlined fn: %s::%s",
debug!("< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner),
tcx.sess.str_of(ii.ident())};
tcx.sess.str_of(ii.ident()));
match ii {
ast::ii_item(i) => {
debug!{">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(i, tcx.sess.intr())};
debug!(">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(i, tcx.sess.intr()));
}
_ => { }
}
@ -650,7 +650,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
id: ast::node_id) {
let tcx = ecx.tcx;
debug!{"Encoding side tables for id %d", id};
debug!("Encoding side tables for id %d", id);
do option::iter(tcx.def_map.find(id)) |def| {
do ebml_w.tag(c::tag_table_def) {
@ -838,9 +838,9 @@ fn decode_side_tables(xcx: extended_decode_ctxt,
let id0 = entry_doc[c::tag_table_id as uint].as_int();
let id = xcx.tr_id(id0);
debug!{">> Side table document with tag 0x%x \
debug!(">> Side table document with tag 0x%x \
found for id %d (orig %d)",
tag, id, id0};
tag, id, id0);
if tag == (c::tag_table_mutbl as uint) {
dcx.maps.mutbl_map.insert(id, ());
@ -889,11 +889,11 @@ fn decode_side_tables(xcx: extended_decode_ctxt,
dcx.tcx.borrowings.insert(id, borrow);
} else {
xcx.dcx.tcx.sess.bug(
fmt!{"unknown tag found in side tables: %x", tag});
fmt!("unknown tag found in side tables: %x", tag));
}
}
debug!{">< Side table doc loaded"};
debug!(">< Side table doc loaded");
}
}
@ -947,8 +947,8 @@ fn roundtrip(in_item: @ast::item) {
let out_str =
io::with_str_writer(|w| ast::serialize_item(w, *out_item) );
debug!{"expected string: %s", exp_str};
debug!{"actual string : %s", out_str};
debug!("expected string: %s", exp_str);
debug!("actual string : %s", out_str);
assert exp_str == out_str;
}

View file

@ -255,16 +255,16 @@ fn check_crate(tcx: ty::ctxt,
if tcx.sess.borrowck_stats() {
io::println(~"--- borrowck stats ---");
io::println(fmt!{"paths requiring guarantees: %u",
bccx.guaranteed_paths});
io::println(fmt!{"paths requiring loans : %s",
make_stat(bccx, bccx.loaned_paths_same)});
io::println(fmt!{"paths requiring imm loans : %s",
make_stat(bccx, bccx.loaned_paths_imm)});
io::println(fmt!{"stable paths : %s",
make_stat(bccx, bccx.stable_paths)});
io::println(fmt!{"paths requiring purity : %s",
make_stat(bccx, bccx.req_pure_paths)});
io::println(fmt!("paths requiring guarantees: %u",
bccx.guaranteed_paths));
io::println(fmt!("paths requiring loans : %s",
make_stat(bccx, bccx.loaned_paths_same)));
io::println(fmt!("paths requiring imm loans : %s",
make_stat(bccx, bccx.loaned_paths_imm)));
io::println(fmt!("stable paths : %s",
make_stat(bccx, bccx.stable_paths)));
io::println(fmt!("paths requiring purity : %s",
make_stat(bccx, bccx.req_pure_paths)));
}
return (bccx.root_map, bccx.mutbl_map);
@ -272,7 +272,7 @@ fn check_crate(tcx: ty::ctxt,
fn make_stat(bccx: borrowck_ctxt, stat: uint) -> ~str {
let stat_f = stat as float;
let total = bccx.guaranteed_paths as float;
fmt!{"%u (%.0f%%)", stat , stat_f * 100f / total}
fmt!("%u (%.0f%%)", stat , stat_f * 100f / total)
}
}
@ -412,8 +412,8 @@ impl borrowck_ctxt {
fn report(err: bckerr) {
self.span_err(
err.cmt.span,
fmt!{"illegal borrow: %s",
self.bckerr_code_to_str(err.code)});
fmt!("illegal borrow: %s",
self.bckerr_code_to_str(err.code)));
}
fn span_err(s: span, m: ~str) {
@ -439,8 +439,8 @@ impl borrowck_ctxt {
fn bckerr_code_to_str(code: bckerr_code) -> ~str {
match code {
err_mutbl(req, act) => {
fmt!{"creating %s alias to aliasable, %s memory",
self.mut_to_str(req), self.mut_to_str(act)}
fmt!("creating %s alias to aliasable, %s memory",
self.mut_to_str(req), self.mut_to_str(act))
}
err_mut_uniq => {
~"unique value in aliasable, mutable location"
@ -455,16 +455,16 @@ impl borrowck_ctxt {
~"rooting is not permitted"
}
err_out_of_root_scope(super_scope, sub_scope) => {
fmt!{"managed value would have to be rooted for %s, \
fmt!("managed value would have to be rooted for %s, \
but can only be rooted for %s",
explain_region(self.tcx, sub_scope),
explain_region(self.tcx, super_scope)}
explain_region(self.tcx, super_scope))
}
err_out_of_scope(super_scope, sub_scope) => {
fmt!{"borrowed pointer must be valid for %s, \
fmt!("borrowed pointer must be valid for %s, \
but the borrowed value is only valid for %s",
explain_region(self.tcx, sub_scope),
explain_region(self.tcx, super_scope)}
explain_region(self.tcx, super_scope))
}
}
}

View file

@ -156,12 +156,12 @@ impl check_loan_ctxt {
callee_span: span) {
let tcx = self.tcx();
debug!{"check_pure_callee_or_arg(pc=%?, expr=%?, \
debug!("check_pure_callee_or_arg(pc=%?, expr=%?, \
callee_id=%d, ty=%s)",
pc,
opt_expr.map(|e| pprust::expr_to_str(e, tcx.sess.intr()) ),
callee_id,
ty_to_str(self.tcx(), ty::node_id_to_type(tcx, callee_id))};
ty_to_str(self.tcx(), ty::node_id_to_type(tcx, callee_id)));
// Purity rules: an expr B is a legal callee or argument to a
// call within a pure function A if at least one of the
@ -204,8 +204,8 @@ impl check_loan_ctxt {
ast::impure_fn | ast::unsafe_fn | ast::extern_fn => {
self.report_purity_error(
pc, callee_span,
fmt!{"access to %s function",
pprust::purity_to_str(fn_ty.purity)});
fmt!("access to %s function",
pprust::purity_to_str(fn_ty.purity)));
}
}
}
@ -256,14 +256,14 @@ impl check_loan_ctxt {
(m_mutbl, m_imm) | (m_imm, m_mutbl) => {
self.bccx.span_err(
new_loan.cmt.span,
fmt!{"loan of %s as %s \
fmt!("loan of %s as %s \
conflicts with prior loan",
self.bccx.cmt_to_str(new_loan.cmt),
self.bccx.mut_to_str(new_loan.mutbl)});
self.bccx.mut_to_str(new_loan.mutbl)));
self.bccx.span_note(
old_loan.cmt.span,
fmt!{"prior loan as %s granted here",
self.bccx.mut_to_str(old_loan.mutbl)});
fmt!("prior loan as %s granted here",
self.bccx.mut_to_str(old_loan.mutbl)));
}
}
}
@ -293,8 +293,8 @@ impl check_loan_ctxt {
fn check_assignment(at: assignment_type, ex: @ast::expr) {
let cmt = self.bccx.cat_expr(ex);
debug!{"check_assignment(cmt=%s)",
self.bccx.cmt_to_repr(cmt)};
debug!("check_assignment(cmt=%s)",
self.bccx.cmt_to_repr(cmt));
if self.in_ctor && self.is_self_field(cmt)
&& at.checked_by_liveness() {
@ -375,12 +375,12 @@ impl check_loan_ctxt {
m_imm => {
self.bccx.span_err(
ex.span,
fmt!{"%s prohibited due to outstanding loan",
at.ing_form(self.bccx.cmt_to_str(cmt))});
fmt!("%s prohibited due to outstanding loan",
at.ing_form(self.bccx.cmt_to_str(cmt))));
self.bccx.span_note(
loan.cmt.span,
fmt!{"loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)});
fmt!("loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)));
return;
}
}
@ -407,17 +407,17 @@ impl check_loan_ctxt {
pc_pure_fn => {
self.tcx().sess.span_err(
sp,
fmt!{"%s prohibited in pure context", msg});
fmt!("%s prohibited in pure context", msg));
}
pc_cmt(e) => {
if self.reported.insert(e.cmt.id, ()) {
self.tcx().sess.span_err(
e.cmt.span,
fmt!{"illegal borrow unless pure: %s",
self.bccx.bckerr_code_to_str(e.code)});
fmt!("illegal borrow unless pure: %s",
self.bccx.bckerr_code_to_str(e.code)));
self.tcx().sess.span_note(
sp,
fmt!{"impure due to %s", msg});
fmt!("impure due to %s", msg));
}
}
}
@ -429,8 +429,8 @@ impl check_loan_ctxt {
}
fn check_move_out_from_cmt(cmt: cmt) {
debug!{"check_move_out_from_cmt(cmt=%s)",
self.bccx.cmt_to_repr(cmt)};
debug!("check_move_out_from_cmt(cmt=%s)",
self.bccx.cmt_to_repr(cmt));
match cmt.cat {
// Rvalues, locals, and arguments can be moved:
@ -447,7 +447,7 @@ impl check_loan_ctxt {
_ => {
self.bccx.span_err(
cmt.span,
fmt!{"moving out of %s", self.bccx.cmt_to_str(cmt)});
fmt!("moving out of %s", self.bccx.cmt_to_str(cmt)));
return;
}
}
@ -462,12 +462,12 @@ impl check_loan_ctxt {
for self.walk_loans_of(cmt.id, lp) |loan| {
self.bccx.span_err(
cmt.span,
fmt!{"moving out of %s prohibited due to outstanding loan",
self.bccx.cmt_to_str(cmt)});
fmt!("moving out of %s prohibited due to outstanding loan",
self.bccx.cmt_to_str(cmt)));
self.bccx.span_note(
loan.cmt.span,
fmt!{"loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)});
fmt!("loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)));
return;
}
}
@ -482,8 +482,8 @@ impl check_loan_ctxt {
some(lp) => lp
};
for self.walk_loans_of(cmt.id, lp) |_loan| {
debug!{"Removing last use entry %? due to outstanding loan",
expr.id};
debug!("Removing last use entry %? due to outstanding loan",
expr.id);
self.bccx.last_use_map.remove(expr.id);
return;
}
@ -527,7 +527,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
sp: span, id: ast::node_id, &&self: check_loan_ctxt,
visitor: visit::vt<check_loan_ctxt>) {
debug!{"purity on entry=%?", copy self.declared_purity};
debug!("purity on entry=%?", copy self.declared_purity);
do save_and_restore(self.in_ctor) {
do save_and_restore(self.declared_purity) {
do save_and_restore(self.fn_args) {
@ -563,7 +563,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
}
}
}
debug!{"purity on exit=%?", copy self.declared_purity};
debug!("purity on exit=%?", copy self.declared_purity);
}
fn check_loans_in_local(local: @ast::local,

View file

@ -90,8 +90,8 @@ fn req_loans_in_expr(ex: @ast::expr,
let tcx = bccx.tcx;
let old_root_ub = self.root_ub;
debug!{"req_loans_in_expr(ex=%s)",
pprust::expr_to_str(ex, tcx.sess.intr())};
debug!("req_loans_in_expr(ex=%s)",
pprust::expr_to_str(ex, tcx.sess.intr()));
// If this expression is borrowed, have to ensure it remains valid:
for tcx.borrowings.find(ex.id).each |borrow| {
@ -257,10 +257,10 @@ impl gather_loan_ctxt {
self.bccx.guaranteed_paths += 1;
debug!{"guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)",
debug!("guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)",
self.bccx.cmt_to_repr(cmt),
self.bccx.mut_to_str(req_mutbl),
region_to_str(self.tcx(), scope_r)};
region_to_str(self.tcx(), scope_r));
let _i = indenter();
match cmt.lp {
@ -289,7 +289,7 @@ impl gather_loan_ctxt {
if self.tcx().sess.borrowck_note_loan() {
self.bccx.span_note(
cmt.span,
fmt!{"immutable loan required"});
fmt!("immutable loan required"));
}
} else {
self.bccx.loaned_paths_same += 1;
@ -343,7 +343,7 @@ impl gather_loan_ctxt {
if self.tcx().sess.borrowck_note_pure() {
self.bccx.span_note(
cmt.span,
fmt!{"purity required"});
fmt!("purity required"));
}
}
_ => {

View file

@ -57,9 +57,9 @@ impl loan_ctxt {
}
fn loan(cmt: cmt, req_mutbl: ast::mutability) -> bckres<()> {
debug!{"loan(%s, %s)",
debug!("loan(%s, %s)",
self.bccx.cmt_to_repr(cmt),
self.bccx.mut_to_str(req_mutbl)};
self.bccx.mut_to_str(req_mutbl));
let _i = indenter();
// see stable() above; should only be called when `cmt` is lendable

View file

@ -58,9 +58,9 @@ priv impl &preserve_ctxt {
fn tcx() -> ty::ctxt { self.bccx.tcx }
fn preserve(cmt: cmt) -> bckres<preserve_condition> {
debug!{"preserve(cmt=%s, root_ub=%?, root_managed_data=%b)",
debug!("preserve(cmt=%s, root_ub=%?, root_managed_data=%b)",
self.bccx.cmt_to_repr(cmt), self.root_ub,
self.root_managed_data};
self.root_managed_data);
let _i = indenter();
match cmt.cat {
@ -155,7 +155,7 @@ priv impl &preserve_ctxt {
// otherwise we have no guarantee the pointer will stay
// live, so we must root the pointer (i.e., inc the ref
// count) for the duration of the loan.
debug!{"base.mutbl = %?", self.bccx.mut_to_str(base.mutbl)};
debug!("base.mutbl = %?", self.bccx.mut_to_str(base.mutbl));
if base.mutbl == m_imm {
let non_rooting_ctxt =
preserve_ctxt({root_managed_data: false with **self});
@ -164,12 +164,12 @@ priv impl &preserve_ctxt {
ok(pc_ok)
}
ok(pc_if_pure(_)) => {
debug!{"must root @T, otherwise purity req'd"};
debug!("must root @T, otherwise purity req'd");
self.attempt_root(cmt, base, derefs)
}
err(e) => {
debug!{"must root @T, err: %s",
self.bccx.bckerr_code_to_str(e.code)};
debug!("must root @T, err: %s",
self.bccx.bckerr_code_to_str(e.code));
self.attempt_root(cmt, base, derefs)
}
}

View file

@ -43,16 +43,16 @@ fn check_capture_clause(tcx: ty::ctxt,
if !vec::any(*freevars, |fv| fv.def == cap_def ) {
tcx.sess.span_warn(
cap_item.span,
fmt!{"captured variable `%s` not used in closure",
tcx.sess.str_of(cap_item.name)});
fmt!("captured variable `%s` not used in closure",
tcx.sess.str_of(cap_item.name)));
}
let cap_def_id = ast_util::def_id_of_def(cap_def).node;
if !seen_defs.insert(cap_def_id, ()) {
tcx.sess.span_err(
cap_item.span,
fmt!{"variable `%s` captured more than once",
tcx.sess.str_of(cap_item.name)});
fmt!("variable `%s` captured more than once",
tcx.sess.str_of(cap_item.name)));
}
}
}
@ -67,8 +67,8 @@ fn compute_capture_vars(tcx: ty::ctxt,
// first add entries for anything explicitly named in the cap clause
for (*cap_clause).each |cap_item| {
debug!{"Doing capture var: %s (%?)",
tcx.sess.str_of(cap_item.name), cap_item.id};
debug!("Doing capture var: %s (%?)",
tcx.sess.str_of(cap_item.name), cap_item.id);
let cap_def = tcx.def_map.get(cap_item.id);
let cap_def_id = ast_util::def_id_of_def(cap_def).node;

View file

@ -214,7 +214,7 @@ fn check_block(b: blk, cx: ctx, v: visit::vt<ctx>) {
}
fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
debug!{"kind::check_expr(%s)", expr_to_str(e, cx.tcx.sess.intr())};
debug!("kind::check_expr(%s)", expr_to_str(e, cx.tcx.sess.intr()));
// Handle any kind bounds on type parameters
do option::iter(cx.tcx.node_type_substs.find(e.id)) |ts| {
@ -433,7 +433,7 @@ fn check_imm_free_var(cx: ctx, def: def, sp: span) {
_ => {
cx.tcx.sess.span_bug(
sp,
fmt!{"unknown def for free variable: %?", def});
fmt!("unknown def for free variable: %?", def));
}
}
}
@ -524,7 +524,7 @@ fn check_cast_for_escaping_regions(
some(ty::re_bound(*)) | some(ty::re_var(*)) => {
cx.tcx.sess.span_bug(
source.span,
fmt!{"bad region found in kind: %?", target_substs.self_r});
fmt!("bad region found in kind: %?", target_substs.self_r));
}
}

View file

@ -126,8 +126,8 @@ struct LanguageItemCollector {
some(original_def_id)
if original_def_id != item_def_id => {
self.session.err(fmt!{"duplicate entry for `%s`",
value});
self.session.err(fmt!("duplicate entry for `%s`",
value));
}
some(_) | none => {
// OK.
@ -185,7 +185,7 @@ struct LanguageItemCollector {
for self.item_refs.each |key, item_ref| {
match copy *item_ref {
none => {
self.session.err(fmt!{"no item found for `%s`", key});
self.session.err(fmt!("no item found for `%s`", key));
}
some(did) => {
// OK.

View file

@ -251,8 +251,8 @@ impl ctxt {
self.span_lint(
new_ctxt.get_level(unrecognized_lint),
meta.span,
fmt!{"unknown `%s` attribute: `%s`",
level_to_str(level), lintname});
fmt!("unknown `%s` attribute: `%s`",
level_to_str(level), lintname));
}
some(lint) => {
@ -261,9 +261,9 @@ impl ctxt {
self.span_lint(
forbid,
meta.span,
fmt!{"%s(%s) overruled by outer forbid(%s)",
fmt!("%s(%s) overruled by outer forbid(%s)",
level_to_str(level),
lintname, lintname});
lintname, lintname));
}
// we do multiple unneeded copies of the
@ -486,7 +486,7 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) {
}
fn check_pat(tcx: ty::ctxt, pat: @ast::pat) {
debug!{"lint check_pat pat=%s", pat_to_str(pat, tcx.sess.intr())};
debug!("lint check_pat pat=%s", pat_to_str(pat, tcx.sess.intr()));
do pat_bindings(tcx.def_map, pat) |binding_mode, id, span, path| {
match binding_mode {
@ -498,8 +498,8 @@ fn check_pat(tcx: ty::ctxt, pat: @ast::pat) {
tcx.sess.span_lint(
deprecated_pattern, id, id,
span,
fmt!{"binding `%s` should use ref or copy mode",
tcx.sess.str_of(path_to_ident(path))});
fmt!("binding `%s` should use ref or copy mode",
tcx.sess.str_of(path_to_ident(path))));
}
}
}
@ -508,7 +508,7 @@ fn check_pat(tcx: ty::ctxt, pat: @ast::pat) {
fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
_body: ast::blk, span: span, id: ast::node_id) {
debug!{"lint check_fn fk=%? id=%?", fk, id};
debug!("lint check_fn fk=%? id=%?", fk, id);
// don't complain about blocks, since they tend to get their modes
// specified from the outside
@ -523,10 +523,10 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
let mut counter = 0;
do vec::iter2(fn_ty.inputs, decl.inputs) |arg_ty, arg_ast| {
counter += 1;
debug!{"arg %d, ty=%s, mode=%s",
debug!("arg %d, ty=%s, mode=%s",
counter,
ty_to_str(tcx, arg_ty.ty),
mode_to_str(arg_ast.mode)};
mode_to_str(arg_ast.mode));
match arg_ast.mode {
ast::expl(ast::by_copy) => {
/* always allow by-copy */
@ -536,7 +536,7 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
tcx.sess.span_lint(
deprecated_mode, id, id,
span,
fmt!{"argument %d uses an explicit mode", counter});
fmt!("argument %d uses an explicit mode", counter));
}
ast::infer(_) => {
@ -545,9 +545,9 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
tcx.sess.span_lint(
deprecated_mode, id, id,
span,
fmt!{"argument %d uses the default mode \
fmt!("argument %d uses the default mode \
but shouldn't",
counter});
counter));
}
}
}

View file

@ -153,11 +153,11 @@ fn check_crate(tcx: ty::ctxt,
}
impl live_node: to_str::ToStr {
fn to_str() -> ~str { fmt!{"ln(%u)", *self} }
fn to_str() -> ~str { fmt!("ln(%u)", *self) }
}
impl variable: to_str::ToStr {
fn to_str() -> ~str { fmt!{"v(%u)", *self} }
fn to_str() -> ~str { fmt!("v(%u)", *self) }
}
// ______________________________________________________________________
@ -243,7 +243,7 @@ struct ir_maps {
vec::push(self.lnks, lnk);
self.num_live_nodes += 1u;
debug!{"%s is of kind %?", ln.to_str(), lnk};
debug!("%s is of kind %?", ln.to_str(), lnk);
ln
}
@ -252,7 +252,7 @@ struct ir_maps {
let ln = self.add_live_node(lnk);
self.live_node_map.insert(node_id, ln);
debug!{"%s is node %d", ln.to_str(), node_id};
debug!("%s is node %d", ln.to_str(), node_id);
}
fn add_variable(vk: var_kind) -> variable {
@ -271,7 +271,7 @@ struct ir_maps {
}
}
debug!{"%s is %?", v.to_str(), vk};
debug!("%s is %?", v.to_str(), vk);
v
}
@ -281,7 +281,7 @@ struct ir_maps {
some(var) => var,
none => {
self.tcx.sess.span_bug(
span, fmt!{"No variable registered for id %d", node_id});
span, fmt!("No variable registered for id %d", node_id));
}
}
}
@ -314,7 +314,7 @@ struct ir_maps {
fn add_last_use(expr_id: node_id, var: variable) {
let vk = self.var_kinds[*var];
debug!{"Node %d is a last use of variable %?", expr_id, vk};
debug!("Node %d is a last use of variable %?", expr_id, vk);
match vk {
vk_arg(id, name, by_move) |
vk_arg(id, name, by_copy) |
@ -332,7 +332,7 @@ struct ir_maps {
}
vk_arg(_, _, by_ref) | vk_arg(_, _, by_mutbl_ref) |
vk_arg(_, _, by_val) | vk_self | vk_field(_) | vk_implicit_ret => {
debug!{"--but it is not owned"};
debug!("--but it is not owned");
}
}
}
@ -340,17 +340,17 @@ struct ir_maps {
fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
sp: span, id: node_id, &&self: @ir_maps, v: vt<@ir_maps>) {
debug!{"visit_fn: id=%d", id};
debug!("visit_fn: id=%d", id);
let _i = util::common::indenter();
// swap in a new set of IR maps for this function body:
let fn_maps = @ir_maps(self.tcx, self.method_map,
self.last_use_map);
debug!{"creating fn_maps: %x", ptr::addr_of(*fn_maps) as uint};
debug!("creating fn_maps: %x", ptr::addr_of(*fn_maps) as uint);
for decl.inputs.each |arg| {
debug!{"adding argument %d", arg.id};
debug!("adding argument %d", arg.id);
let mode = ty::resolved_mode(self.tcx, arg.mode);
(*fn_maps).add_variable(vk_arg(arg.id, arg.ident, mode));
};
@ -405,7 +405,7 @@ fn add_class_fields(self: @ir_maps, did: def_id) {
fn visit_local(local: @local, &&self: @ir_maps, vt: vt<@ir_maps>) {
let def_map = self.tcx.def_map;
do pat_util::pat_bindings(def_map, local.node.pat) |_bm, p_id, sp, path| {
debug!{"adding local variable %d", p_id};
debug!("adding local variable %d", p_id);
let name = ast_util::path_to_ident(path);
(*self).add_live_node_for_node(p_id, lnk_vdef(sp));
(*self).add_variable(vk_local(p_id, name));
@ -418,7 +418,7 @@ fn visit_expr(expr: @expr, &&self: @ir_maps, vt: vt<@ir_maps>) {
// live nodes required for uses or definitions of variables:
expr_path(_) => {
let def = self.tcx.def_map.get(expr.id);
debug!{"expr %d: path that leads to %?", expr.id, def};
debug!("expr %d: path that leads to %?", expr.id, def);
if relevant_def(def).is_some() {
(*self).add_live_node_for_node(expr.id, lnk_expr(expr.span));
}
@ -541,8 +541,8 @@ struct liveness {
// code have to agree about which AST nodes are worth
// creating liveness nodes for.
self.tcx.sess.span_bug(
span, fmt!{"No live node registered for node %d",
node_id});
span, fmt!("No live node registered for node %d",
node_id));
}
}
}
@ -665,7 +665,7 @@ struct liveness {
wr.write_str(~"[ln(");
wr.write_uint(*ln);
wr.write_str(~") of kind ");
wr.write_str(fmt!{"%?", copy self.ir.lnks[*ln]});
wr.write_str(fmt!("%?", copy self.ir.lnks[*ln]));
wr.write_str(~" reads");
self.write_vars(wr, ln, |idx| self.users[idx].reader );
wr.write_str(~" writes");
@ -696,8 +696,8 @@ struct liveness {
self.indices2(ln, succ_ln, |idx, succ_idx| {
self.users[idx] = self.users[succ_idx]
});
debug!{"init_from_succ(ln=%s, succ=%s)",
self.ln_str(ln), self.ln_str(succ_ln)};
debug!("init_from_succ(ln=%s, succ=%s)",
self.ln_str(ln), self.ln_str(succ_ln));
}
fn merge_from_succ(ln: live_node, succ_ln: live_node,
@ -716,8 +716,8 @@ struct liveness {
}
}
debug!{"merge_from_succ(ln=%s, succ=%s, first_merge=%b, changed=%b)",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed};
debug!("merge_from_succ(ln=%s, succ=%s, first_merge=%b, changed=%b)",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed);
return changed;
fn copy_if_invalid(src: live_node, &dst: live_node) -> bool {
@ -739,8 +739,8 @@ struct liveness {
self.users[idx].reader = invalid_node();
self.users[idx].writer = invalid_node();
debug!{"%s defines %s (idx=%u): %s", writer.to_str(), var.to_str(),
idx, self.ln_str(writer)};
debug!("%s defines %s (idx=%u): %s", writer.to_str(), var.to_str(),
idx, self.ln_str(writer));
}
// Either read, write, or both depending on the acc bitset
@ -763,8 +763,8 @@ struct liveness {
self.users[idx].used = true;
}
debug!{"%s accesses[%x] %s: %s",
ln.to_str(), acc, var.to_str(), self.ln_str(ln)};
debug!("%s accesses[%x] %s: %s",
ln.to_str(), acc, var.to_str(), self.ln_str(ln));
}
// _______________________________________________________________________
@ -779,14 +779,14 @@ struct liveness {
});
// hack to skip the loop unless #debug is enabled:
debug!{"^^ liveness computation results for body %d (entry=%s)",
debug!("^^ liveness computation results for body %d (entry=%s)",
{
for uint::range(0u, self.ir.num_live_nodes) |ln_idx| {
#debug["%s", self.ln_str(live_node(ln_idx))];
}
body.node.id
},
entry_ln.to_str()};
entry_ln.to_str());
entry_ln
}
@ -1372,7 +1372,7 @@ fn check_local(local: @local, &&self: @liveness, vt: vt<@liveness>) {
// No initializer: the variable might be unused; if not, it
// should not be live at this point.
debug!{"check_local() with no initializer"};
debug!("check_local() with no initializer");
do (*self).pat_bindings(local.node.pat) |ln, var, sp| {
if !self.warn_about_unused(sp, ln, var) {
match (*self).live_on_exit(ln, var) {
@ -1492,8 +1492,8 @@ impl @liveness {
none => { /* ok */ }
some(lnk_exit) => {
self.tcx.sess.span_err(
sp, fmt!{"field `self.%s` is never initialized",
self.tcx.sess.str_of(nm)});
sp, fmt!("field `self.%s` is never initialized",
self.tcx.sess.str_of(nm)));
}
some(lnk) => {
self.report_illegal_read(
@ -1531,8 +1531,8 @@ impl @liveness {
}
fn check_move_from_var(span: span, ln: live_node, var: variable) {
debug!{"check_move_from_var(%s, %s)",
ln.to_str(), var.to_str()};
debug!("check_move_from_var(%s, %s)",
ln.to_str(), var.to_str());
match (*self).live_on_exit(ln, var) {
none => {}
@ -1548,8 +1548,8 @@ impl @liveness {
}
fn check_move_from_expr(expr: @expr, vt: vt<@liveness>) {
debug!{"check_move_from_expr(node %d: %s)",
expr.id, expr_to_str(expr, self.tcx.sess.intr())};
debug!("check_move_from_expr(node %d: %s)",
expr.id, expr_to_str(expr, self.tcx.sess.intr()));
if self.ir.method_map.contains_key(expr.id) {
// actually an rvalue, since this calls a method
@ -1645,7 +1645,7 @@ impl @liveness {
some(lnk) => {
self.tcx.sess.span_bug(
orig_span,
fmt!{"illegal writer: %?", lnk});
fmt!("illegal writer: %?", lnk));
}
none => {}
}
@ -1664,15 +1664,15 @@ impl @liveness {
vk_arg(_, name, _) => {
self.tcx.sess.span_err(
move_span,
fmt!{"illegal move from argument `%s`, which is not \
copy or move mode", self.tcx.sess.str_of(name)});
fmt!("illegal move from argument `%s`, which is not \
copy or move mode", self.tcx.sess.str_of(name)));
return;
}
vk_field(name) => {
self.tcx.sess.span_err(
move_span,
fmt!{"illegal move from field `%s`",
self.tcx.sess.str_of(name)});
fmt!("illegal move from field `%s`",
self.tcx.sess.str_of(name)));
return;
}
vk_self => {
@ -1685,8 +1685,8 @@ impl @liveness {
vk_local(*) | vk_implicit_ret => {
self.tcx.sess.span_bug(
move_span,
fmt!{"illegal reader (%?) for `%?`",
lnk, vk});
fmt!("illegal reader (%?) for `%?`",
lnk, vk));
}
}
}
@ -1713,18 +1713,18 @@ impl @liveness {
lnk_freevar(span) => {
self.tcx.sess.span_err(
span,
fmt!{"capture of %s: `%s`", msg, name});
fmt!("capture of %s: `%s`", msg, name));
}
lnk_expr(span) => {
self.tcx.sess.span_err(
span,
fmt!{"use of %s: `%s`", msg, name});
fmt!("use of %s: `%s`", msg, name));
}
lnk_exit |
lnk_vdef(_) => {
self.tcx.sess.span_bug(
chk_span,
fmt!{"illegal reader: %?", lnk});
fmt!("illegal reader: %?", lnk));
}
}
}
@ -1781,11 +1781,11 @@ impl @liveness {
if is_assigned {
self.tcx.sess.span_warn(
sp, fmt!{"variable `%s` is assigned to, \
but never used", name});
sp, fmt!("variable `%s` is assigned to, \
but never used", name));
} else {
self.tcx.sess.span_warn(
sp, fmt!{"unused variable: `%s`", name});
sp, fmt!("unused variable: `%s`", name));
}
}
return true;
@ -1798,7 +1798,7 @@ impl @liveness {
for self.should_warn(var).each |name| {
self.tcx.sess.span_warn(
sp,
fmt!{"value assigned to `%s` is never read", name});
fmt!("value assigned to `%s` is never read", name));
}
}
}

View file

@ -149,8 +149,8 @@ fn deref_kind(tcx: ty::ctxt, t: ty::t) -> deref_kind {
some(k) => k,
none => {
tcx.sess.bug(
fmt!{"deref_cat() invoked on non-derefable type %s",
ty_to_str(tcx, t)});
fmt!("deref_cat() invoked on non-derefable type %s",
ty_to_str(tcx, t)));
}
}
}
@ -262,8 +262,8 @@ impl &mem_categorization_ctxt {
}
fn cat_expr(expr: @ast::expr) -> cmt {
debug!{"cat_expr: id=%d expr=%s",
expr.id, pprust::expr_to_str(expr, self.tcx.sess.intr())};
debug!("cat_expr: id=%d expr=%s",
expr.id, pprust::expr_to_str(expr, self.tcx.sess.intr()));
let tcx = self.tcx;
let expr_ty = tcx.ty(expr);
@ -279,8 +279,8 @@ impl &mem_categorization_ctxt {
none => {
tcx.sess.span_bug(
e_base.span,
fmt!{"Explicit deref of non-derefable type `%s`",
ty_to_str(tcx, tcx.ty(e_base))});
fmt!("Explicit deref of non-derefable type `%s`",
ty_to_str(tcx, tcx.ty(e_base))));
}
}
}
@ -468,9 +468,9 @@ impl &mem_categorization_ctxt {
none => {
self.tcx.sess.span_bug(
node.span(),
fmt!{"Cannot find field `%s` in type `%s`",
fmt!("Cannot find field `%s` in type `%s`",
self.tcx.sess.str_of(f_name),
ty_to_str(self.tcx, base_cmt.ty)});
ty_to_str(self.tcx, base_cmt.ty)));
}
};
let m = self.inherited_mutability(base_cmt.mutbl, f_mutbl);
@ -533,8 +533,8 @@ impl &mem_categorization_ctxt {
none => {
self.tcx.sess.span_bug(
expr.span,
fmt!{"Explicit index of non-index type `%s`",
ty_to_str(self.tcx, base_cmt.ty)});
fmt!("Explicit index of non-index type `%s`",
ty_to_str(self.tcx, base_cmt.ty)));
}
};
@ -655,9 +655,9 @@ impl &mem_categorization_ctxt {
let _i = indenter();
let tcx = self.tcx;
debug!{"cat_pattern: id=%d pat=%s cmt=%s",
debug!("cat_pattern: id=%d pat=%s cmt=%s",
pat.id, pprust::pat_to_str(pat, tcx.sess.intr()),
self.cmt_to_repr(cmt)};
self.cmt_to_repr(cmt));
match pat.node {
ast::pat_wild => {
@ -672,7 +672,7 @@ impl &mem_categorization_ctxt {
let enum_did = match self.tcx.def_map.find(pat.id) {
some(ast::def_variant(enum_did, _)) => enum_did,
e => tcx.sess.span_bug(pat.span,
fmt!{"resolved to %?, not variant", e})
fmt!("resolved to %?, not variant", e))
};
for subpats.each |subpat| {
@ -737,15 +737,15 @@ impl &mem_categorization_ctxt {
cat_special(sk_heap_upvar) => ~"heap-upvar",
cat_stack_upvar(_) => ~"stack-upvar",
cat_rvalue => ~"rvalue",
cat_local(node_id) => fmt!{"local(%d)", node_id},
cat_binding(node_id) => fmt!{"binding(%d)", node_id},
cat_arg(node_id) => fmt!{"arg(%d)", node_id},
cat_local(node_id) => fmt!("local(%d)", node_id),
cat_binding(node_id) => fmt!("binding(%d)", node_id),
cat_arg(node_id) => fmt!("arg(%d)", node_id),
cat_deref(cmt, derefs, ptr) => {
fmt!{"%s->(%s, %u)", self.cat_to_repr(cmt.cat),
self.ptr_sigil(ptr), derefs}
fmt!("%s->(%s, %u)", self.cat_to_repr(cmt.cat),
self.ptr_sigil(ptr), derefs)
}
cat_comp(cmt, comp) => {
fmt!{"%s.%s", self.cat_to_repr(cmt.cat), self.comp_to_repr(comp)}
fmt!("%s.%s", self.cat_to_repr(cmt.cat), self.comp_to_repr(comp))
}
cat_discr(cmt, _) => self.cat_to_repr(cmt.cat)
}
@ -780,29 +780,29 @@ impl &mem_categorization_ctxt {
fn lp_to_str(lp: @loan_path) -> ~str {
match *lp {
lp_local(node_id) => {
fmt!{"local(%d)", node_id}
fmt!("local(%d)", node_id)
}
lp_arg(node_id) => {
fmt!{"arg(%d)", node_id}
fmt!("arg(%d)", node_id)
}
lp_deref(lp, ptr) => {
fmt!{"%s->(%s)", self.lp_to_str(lp),
self.ptr_sigil(ptr)}
fmt!("%s->(%s)", self.lp_to_str(lp),
self.ptr_sigil(ptr))
}
lp_comp(lp, comp) => {
fmt!{"%s.%s", self.lp_to_str(lp),
self.comp_to_repr(comp)}
fmt!("%s.%s", self.lp_to_str(lp),
self.comp_to_repr(comp))
}
}
}
fn cmt_to_repr(cmt: cmt) -> ~str {
fmt!{"{%s id:%d m:%s lp:%s ty:%s}",
fmt!("{%s id:%d m:%s lp:%s ty:%s}",
self.cat_to_repr(cmt.cat),
cmt.id,
self.mut_to_str(cmt.mutbl),
cmt.lp.map_default(~"none", |p| self.lp_to_str(p) ),
ty_to_str(self.tcx, cmt.ty)}
ty_to_str(self.tcx, cmt.ty))
}
fn cmt_to_str(cmt: cmt) -> ~str {
@ -818,8 +818,8 @@ impl &mem_categorization_ctxt {
cat_local(_) => mut_str + ~" local variable",
cat_binding(_) => ~"pattern binding",
cat_arg(_) => ~"argument",
cat_deref(_, _, pk) => fmt!{"dereference of %s %s pointer",
mut_str, self.ptr_sigil(pk)},
cat_deref(_, _, pk) => fmt!("dereference of %s %s pointer",
mut_str, self.ptr_sigil(pk)),
cat_stack_upvar(_) => {
~"captured outer " + mut_str + ~" variable in a stack closure"
}

View file

@ -196,7 +196,7 @@ fn parent_id(cx: ctxt, span: span) -> ast::node_id {
/// Records the current parent (if any) as the parent of `child_id`.
fn record_parent(cx: ctxt, child_id: ast::node_id) {
for cx.parent.each |parent_id| {
debug!{"parent of node %d is node %d", child_id, parent_id};
debug!("parent of node %d is node %d", child_id, parent_id);
cx.region_map.insert(child_id, parent_id);
}
}
@ -255,13 +255,13 @@ fn resolve_expr(expr: @ast::expr, cx: ctxt, visitor: visit::vt<ctxt>) {
let mut new_cx = cx;
match expr.node {
ast::expr_call(*) => {
debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr())};
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = some(expr.id);
}
ast::expr_match(subexpr, _, _) => {
debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr())};
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = some(expr.id);
}
ast::expr_fn(_, _, _, cap_clause) |
@ -314,9 +314,9 @@ fn resolve_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
}
};
debug!{"visiting fn with body %d. cx.parent: %? \
debug!("visiting fn with body %d. cx.parent: %? \
fn_cx.parent: %?",
body.node.id, cx.parent, fn_cx.parent};
body.node.id, cx.parent, fn_cx.parent);
for decl.inputs.each |input| {
cx.region_map.insert(input.id, body.node.id);
@ -441,10 +441,10 @@ impl determine_rp_ctxt {
some(v) => join_variance(v, variance)
};
debug!["add_rp() variance for %s: %? == %? ^ %?",
debug!("add_rp() variance for %s: %? == %? ^ %?",
ast_map::node_id_to_str(self.ast_map, id,
self.sess.parse_sess.interner),
joined_variance, old_variance, variance];
joined_variance, old_variance, variance);
if some(joined_variance) != old_variance {
self.region_paramd_items.insert(id, joined_variance);
@ -458,13 +458,13 @@ impl determine_rp_ctxt {
/// contains a value of type `from`, so if `from` is
/// region-parameterized, so is the current item.
fn add_dep(from: ast::node_id) {
debug!["add dependency from %d -> %d (%s -> %s) with variance %?",
debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
from, self.item_id,
ast_map::node_id_to_str(self.ast_map, from,
self.sess.parse_sess.interner),
ast_map::node_id_to_str(self.ast_map, self.item_id,
self.sess.parse_sess.interner),
copy self.ambient_variance];
copy self.ambient_variance);
let vec = match self.dep_map.find(from) {
some(vec) => vec,
none => {
@ -525,7 +525,7 @@ impl determine_rp_ctxt {
let old_anon_implies_rp = self.anon_implies_rp;
self.item_id = item_id;
self.anon_implies_rp = anon_implies_rp;
debug!{"with_item_id(%d, %b)", item_id, anon_implies_rp};
debug!("with_item_id(%d, %b)", item_id, anon_implies_rp);
let _i = util::common::indenter();
f();
self.item_id = old_item_id;
@ -590,8 +590,8 @@ fn determine_rp_in_ty(ty: @ast::ty,
// locations)
match ty.node {
ast::ty_rptr(r, _) => {
debug!["referenced rptr type %s",
pprust::ty_to_str(ty, cx.sess.intr())];
debug!("referenced rptr type %s",
pprust::ty_to_str(ty, cx.sess.intr()));
if cx.region_is_relevant(r) {
cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant))
@ -623,8 +623,8 @@ fn determine_rp_in_ty(ty: @ast::ty,
match csearch::get_region_param(cstore, did) {
none => {}
some(variance) => {
debug!["reference to external, rp'd type %s",
pprust::ty_to_str(ty, cx.sess.intr())];
debug!("reference to external, rp'd type %s",
pprust::ty_to_str(ty, cx.sess.intr()));
cx.add_rp(cx.item_id, cx.add_variance(variance))
}
}
@ -743,7 +743,7 @@ fn determine_rp_in_crate(sess: session,
while cx.worklist.len() != 0 {
let c_id = cx.worklist.pop();
let c_variance = cx.region_paramd_items.get(c_id);
debug!["popped %d from worklist", c_id];
debug!("popped %d from worklist", c_id);
match cx.dep_map.find(c_id) {
none => {}
some(deps) => {

View file

@ -1243,9 +1243,9 @@ struct Resolver {
if self.block_needs_anonymous_module(block) {
let block_id = block.node.id;
debug!{"(building reduced graph for block) creating a new \
debug!("(building reduced graph for block) creating a new \
anonymous module for block %d",
block_id};
block_id);
let parent_module = self.get_module_from_parent(parent);
let new_module = @Module(BlockParentLink(parent_module, block_id),
@ -1414,8 +1414,8 @@ struct Resolver {
// Define or reuse the module node.
match child_name_bindings.module_def {
NoModuleDef => {
debug!{"(building reduced graph for external crate) \
autovivifying %s", ident_str};
debug!("(building reduced graph for external crate) \
autovivifying %s", ident_str);
let parent_link = self.get_parent_link(new_parent,
ident);
(*child_name_bindings).define_module(parent_link,
@ -1445,12 +1445,12 @@ struct Resolver {
// written, we can't process this impl now. We'll get it
// later.
debug!{"(building reduced graph for external crate) \
ignoring impl %s", final_ident_str};
debug!("(building reduced graph for external crate) \
ignoring impl %s", final_ident_str);
}
dl_field => {
debug!{"(building reduced graph for external crate) \
ignoring field %s", final_ident_str};
debug!("(building reduced graph for external crate) \
ignoring field %s", final_ident_str);
}
}
}
@ -1508,14 +1508,14 @@ struct Resolver {
let mut i = 0u;
let mut prev_unresolved_imports = 0u;
loop {
debug!{"(resolving imports) iteration %u, %u imports left",
i, self.unresolved_imports};
debug!("(resolving imports) iteration %u, %u imports left",
i, self.unresolved_imports);
let module_root = (*self.graph_root).get_module();
self.resolve_imports_for_module_subtree(module_root);
if self.unresolved_imports == 0u {
debug!{"(resolving imports) success"};
debug!("(resolving imports) success");
break;
}
@ -1535,8 +1535,8 @@ struct Resolver {
* submodules.
*/
fn resolve_imports_for_module_subtree(module_: @Module) {
debug!{"(resolving imports for module subtree) resolving %s",
self.module_to_str(module_)};
debug!("(resolving imports for module subtree) resolving %s",
self.module_to_str(module_));
self.resolve_imports_for_module(module_);
for module_.children.each |_name, child_node| {
@ -1558,9 +1558,9 @@ struct Resolver {
/// Attempts to resolve imports for the given module only.
fn resolve_imports_for_module(module_: @Module) {
if (*module_).all_imports_resolved() {
debug!{"(resolving imports for module) all imports resolved for \
debug!("(resolving imports for module) all imports resolved for \
%s",
self.module_to_str(module_)};
self.module_to_str(module_));
return;
}
@ -1616,10 +1616,10 @@ struct Resolver {
let mut resolution_result;
let module_path = import_directive.module_path;
debug!{"(resolving import for module) resolving import `%s::...` in \
debug!("(resolving import for module) resolving import `%s::...` in \
`%s`",
self.atoms_to_str((*module_path).get()),
self.module_to_str(module_)};
self.module_to_str(module_));
// One-level renaming imports of the form `import foo = bar;` are
// handled specially.
@ -1700,16 +1700,16 @@ struct Resolver {
target: Atom, source: Atom)
-> ResolveResult<()> {
debug!{"(resolving single import) resolving `%s` = `%s::%s` from \
debug!("(resolving single import) resolving `%s` = `%s::%s` from \
`%s`",
self.session.str_of(target),
self.module_to_str(containing_module),
self.session.str_of(source),
self.module_to_str(module_)};
self.module_to_str(module_));
if !self.name_is_exported(containing_module, source) {
debug!{"(resolving single import) name `%s` is unexported",
self.session.str_of(source)};
debug!("(resolving single import) name `%s` is unexported",
self.session.str_of(source));
return Failed;
}
@ -1756,8 +1756,8 @@ struct Resolver {
// able to resolve this import.
if containing_module.glob_count > 0u {
debug!{"(resolving single import) unresolved glob; \
bailing out"};
debug!("(resolving single import) unresolved glob; \
bailing out");
return Indeterminate;
}
@ -1820,8 +1820,8 @@ struct Resolver {
}
some(_) => {
// The import is unresolved. Bail out.
debug!{"(resolving single import) unresolved import; \
bailing out"};
debug!("(resolving single import) unresolved import; \
bailing out");
return Indeterminate;
}
}
@ -1834,13 +1834,13 @@ struct Resolver {
match module_result {
BoundResult(target_module, name_bindings) => {
debug!{"(resolving single import) found module binding"};
debug!("(resolving single import) found module binding");
import_resolution.module_target =
some(Target(target_module, name_bindings));
}
UnboundResult => {
debug!{"(resolving single import) didn't find module \
binding"};
debug!("(resolving single import) didn't find module \
binding");
}
UnknownResult => {
fail ~"module result should be known at this point";
@ -1880,7 +1880,7 @@ struct Resolver {
assert import_resolution.outstanding_references >= 1u;
import_resolution.outstanding_references -= 1u;
debug!{"(resolving single import) successfully resolved import"};
debug!("(resolving single import) successfully resolved import");
return Success(());
}
@ -1902,8 +1902,8 @@ struct Resolver {
// (including globs).
if !(*containing_module).all_imports_resolved() {
debug!{"(resolving glob import) target module has unresolved \
imports; bailing out"};
debug!("(resolving glob import) target module has unresolved \
imports; bailing out");
return Indeterminate;
}
@ -1914,15 +1914,15 @@ struct Resolver {
|atom, target_import_resolution| {
if !self.name_is_exported(containing_module, atom) {
debug!{"(resolving glob import) name `%s` is unexported",
self.session.str_of(atom)};
debug!("(resolving glob import) name `%s` is unexported",
self.session.str_of(atom));
again;
}
debug!{"(resolving glob import) writing module resolution \
debug!("(resolving glob import) writing module resolution \
%? into `%s`",
is_none(target_import_resolution.module_target),
self.module_to_str(module_)};
self.module_to_str(module_));
// Here we merge two import resolutions.
match module_.import_resolutions.find(atom) {
@ -1978,8 +1978,8 @@ struct Resolver {
// Add all children from the containing module.
for containing_module.children.each |atom, name_bindings| {
if !self.name_is_exported(containing_module, atom) {
debug!{"(resolving glob import) name `%s` is unexported",
self.session.str_of(atom)};
debug!("(resolving glob import) name `%s` is unexported",
self.session.str_of(atom));
again;
}
@ -1997,31 +1997,31 @@ struct Resolver {
}
debug!{"(resolving glob import) writing resolution `%s` in `%s` \
debug!("(resolving glob import) writing resolution `%s` in `%s` \
to `%s`",
self.session.str_of(atom),
self.module_to_str(containing_module),
self.module_to_str(module_)};
self.module_to_str(module_));
// Merge the child item into the import resolution.
if (*name_bindings).defined_in_namespace(ModuleNS) {
debug!{"(resolving glob import) ... for module target"};
debug!("(resolving glob import) ... for module target");
dest_import_resolution.module_target =
some(Target(containing_module, name_bindings));
}
if (*name_bindings).defined_in_namespace(ValueNS) {
debug!{"(resolving glob import) ... for value target"};
debug!("(resolving glob import) ... for value target");
dest_import_resolution.value_target =
some(Target(containing_module, name_bindings));
}
if (*name_bindings).defined_in_namespace(TypeNS) {
debug!{"(resolving glob import) ... for type target"};
debug!("(resolving glob import) ... for type target");
dest_import_resolution.type_target =
some(Target(containing_module, name_bindings));
}
}
debug!{"(resolving glob import) successfully resolved import"};
debug!("(resolving glob import) successfully resolved import");
return Success(());
}
@ -2050,9 +2050,9 @@ struct Resolver {
return Failed;
}
Indeterminate => {
debug!{"(resolving module path for import) module \
debug!("(resolving module path for import) module \
resolution is indeterminate: %s",
self.session.str_of(name)};
self.session.str_of(name));
return Indeterminate;
}
Success(target) => {
@ -2060,9 +2060,9 @@ struct Resolver {
NoModuleDef => {
// Not a module.
self.session.span_err(span,
fmt!{"not a module: %s",
fmt!("not a module: %s",
self.session.
str_of(name)});
str_of(name)));
return Failed;
}
ModuleDef(module_) => {
@ -2091,10 +2091,10 @@ struct Resolver {
let module_path_len = (*module_path).len();
assert module_path_len > 0u;
debug!{"(resolving module path for import) processing `%s` rooted at \
debug!("(resolving module path for import) processing `%s` rooted at \
`%s`",
self.atoms_to_str((*module_path).get()),
self.module_to_str(module_)};
self.module_to_str(module_));
// The first element of the module path must be in the current scope
// chain.
@ -2107,8 +2107,8 @@ struct Resolver {
return Failed;
}
Indeterminate => {
debug!{"(resolving module path for import) indeterminate; \
bailing"};
debug!("(resolving module path for import) indeterminate; \
bailing");
return Indeterminate;
}
Success(resulting_module) => {
@ -2128,11 +2128,11 @@ struct Resolver {
namespace: Namespace)
-> ResolveResult<Target> {
debug!{"(resolving item in lexical scope) resolving `%s` in \
debug!("(resolving item in lexical scope) resolving `%s` in \
namespace %? in `%s`",
self.session.str_of(name),
namespace,
self.module_to_str(module_)};
self.module_to_str(module_));
// The current module node is handled specially. First, check for
// its immediate children.
@ -2159,9 +2159,9 @@ struct Resolver {
match (*import_resolution).target_for_namespace(namespace) {
none => {
// Not found; continue.
debug!{"(resolving item in lexical scope) found \
debug!("(resolving item in lexical scope) found \
import resolution, but not in namespace %?",
namespace};
namespace);
}
some(target) => {
import_resolution.used = true;
@ -2178,8 +2178,8 @@ struct Resolver {
match search_module.parent_link {
NoParentLink => {
// No more parents. This module was unresolved.
debug!{"(resolving item in lexical scope) unresolved \
module"};
debug!("(resolving item in lexical scope) unresolved \
module");
return Failed;
}
ModuleParentLink(parent_module_node, _) |
@ -2198,8 +2198,8 @@ struct Resolver {
// We couldn't see through the higher scope because of an
// unresolved import higher up. Bail.
debug!{"(resolving item in lexical scope) indeterminate \
higher scope; bailing"};
debug!("(resolving item in lexical scope) indeterminate \
higher scope; bailing");
return Indeterminate;
}
Success(target) => {
@ -2217,8 +2217,8 @@ struct Resolver {
Success(target) => {
match target.bindings.module_def {
NoModuleDef => {
error!{"!!! (resolving module in lexical scope) module
wasn't actually a module!"};
error!("!!! (resolving module in lexical scope) module
wasn't actually a module!");
return Failed;
}
ModuleDef(module_) => {
@ -2227,13 +2227,13 @@ struct Resolver {
}
}
Indeterminate => {
debug!{"(resolving module in lexical scope) indeterminate; \
bailing"};
debug!("(resolving module in lexical scope) indeterminate; \
bailing");
return Indeterminate;
}
Failed => {
debug!{"(resolving module in lexical scope) failed to \
resolve"};
debug!("(resolving module in lexical scope) failed to \
resolve");
return Failed;
}
}
@ -2255,13 +2255,13 @@ struct Resolver {
xray: XrayFlag)
-> ResolveResult<Target> {
debug!{"(resolving name in module) resolving `%s` in `%s`",
debug!("(resolving name in module) resolving `%s` in `%s`",
self.session.str_of(name),
self.module_to_str(module_)};
self.module_to_str(module_));
if xray == NoXray && !self.name_is_exported(module_, name) {
debug!{"(resolving name in module) name `%s` is unexported",
self.session.str_of(name)};
debug!("(resolving name in module) name `%s` is unexported",
self.session.str_of(name));
return Failed;
}
@ -2270,7 +2270,7 @@ struct Resolver {
some(name_bindings)
if (*name_bindings).defined_in_namespace(namespace) => {
debug!{"(resolving name in module) found node as child"};
debug!("(resolving name in module) found node as child");
return Success(Target(module_, name_bindings));
}
some(_) | none => {
@ -2282,7 +2282,7 @@ struct Resolver {
// we bail out; we don't know its imports yet.
if module_.glob_count > 0u {
debug!{"(resolving name in module) module has glob; bailing out"};
debug!("(resolving name in module) module has glob; bailing out");
return Indeterminate;
}
@ -2290,20 +2290,20 @@ struct Resolver {
match module_.import_resolutions.find(name) {
some(import_resolution) => {
if import_resolution.outstanding_references != 0u {
debug!{"(resolving name in module) import unresolved; \
bailing out"};
debug!("(resolving name in module) import unresolved; \
bailing out");
return Indeterminate;
}
match (*import_resolution).target_for_namespace(namespace) {
none => {
debug!{"(resolving name in module) name found, but \
debug!("(resolving name in module) name found, but \
not in namespace %?",
namespace};
namespace);
}
some(target) => {
debug!{"(resolving name in module) resolved to \
import"};
debug!("(resolving name in module) resolved to \
import");
import_resolution.used = true;
return Success(copy target);
}
@ -2315,8 +2315,8 @@ struct Resolver {
}
// We're out of luck.
debug!{"(resolving name in module) failed to resolve %s",
self.session.str_of(name)};
debug!("(resolving name in module) failed to resolve %s",
self.session.str_of(name));
return Failed;
}
@ -2341,81 +2341,81 @@ struct Resolver {
}
}
debug!{"(resolving one-level naming result) resolving import `%s` = \
debug!("(resolving one-level naming result) resolving import `%s` = \
`%s` in `%s`",
self.session.str_of(target_name),
self.session.str_of(source_name),
self.module_to_str(module_)};
self.module_to_str(module_));
// Find the matching items in the lexical scope chain for every
// namespace. If any of them come back indeterminate, this entire
// import is indeterminate.
let mut module_result;
debug!{"(resolving one-level naming result) searching for module"};
debug!("(resolving one-level naming result) searching for module");
match self.resolve_item_in_lexical_scope(module_,
source_name,
ModuleNS) {
Failed => {
debug!{"(resolving one-level renaming import) didn't find \
module result"};
debug!("(resolving one-level renaming import) didn't find \
module result");
module_result = none;
}
Indeterminate => {
debug!{"(resolving one-level renaming import) module result \
is indeterminate; bailing"};
debug!("(resolving one-level renaming import) module result \
is indeterminate; bailing");
return Indeterminate;
}
Success(name_bindings) => {
debug!{"(resolving one-level renaming import) module result \
found"};
debug!("(resolving one-level renaming import) module result \
found");
module_result = some(copy name_bindings);
}
}
let mut value_result;
debug!{"(resolving one-level naming result) searching for value"};
debug!("(resolving one-level naming result) searching for value");
match self.resolve_item_in_lexical_scope(module_,
source_name,
ValueNS) {
Failed => {
debug!{"(resolving one-level renaming import) didn't find \
value result"};
debug!("(resolving one-level renaming import) didn't find \
value result");
value_result = none;
}
Indeterminate => {
debug!{"(resolving one-level renaming import) value result \
is indeterminate; bailing"};
debug!("(resolving one-level renaming import) value result \
is indeterminate; bailing");
return Indeterminate;
}
Success(name_bindings) => {
debug!{"(resolving one-level renaming import) value result \
found"};
debug!("(resolving one-level renaming import) value result \
found");
value_result = some(copy name_bindings);
}
}
let mut type_result;
debug!{"(resolving one-level naming result) searching for type"};
debug!("(resolving one-level naming result) searching for type");
match self.resolve_item_in_lexical_scope(module_,
source_name,
TypeNS) {
Failed => {
debug!{"(resolving one-level renaming import) didn't find \
type result"};
debug!("(resolving one-level renaming import) didn't find \
type result");
type_result = none;
}
Indeterminate => {
debug!{"(resolving one-level renaming import) type result is \
indeterminate; bailing"};
debug!("(resolving one-level renaming import) type result is \
indeterminate; bailing");
return Indeterminate;
}
Success(name_bindings) => {
debug!{"(resolving one-level renaming import) type result \
found"};
debug!("(resolving one-level renaming import) type result \
found");
type_result = some(copy name_bindings);
}
}
@ -2455,11 +2455,11 @@ struct Resolver {
import resolution name by now";
}
some(import_resolution) => {
debug!{"(resolving one-level renaming import) writing module \
debug!("(resolving one-level renaming import) writing module \
result %? for `%s` into `%s`",
is_none(module_result),
self.session.str_of(target_name),
self.module_to_str(module_)};
self.module_to_str(module_));
import_resolution.module_target = module_result;
import_resolution.value_target = value_result;
@ -2470,7 +2470,7 @@ struct Resolver {
}
}
debug!{"(resolving one-level renaming import) successfully resolved"};
debug!("(resolving one-level renaming import) successfully resolved");
return Success(());
}
@ -2526,9 +2526,9 @@ struct Resolver {
}
some(_) => {
// Bail out.
debug!{"(recording exports for module subtree) not recording \
debug!("(recording exports for module subtree) not recording \
exports for `%s`",
self.module_to_str(module_)};
self.module_to_str(module_));
return;
}
}
@ -2638,17 +2638,17 @@ struct Resolver {
some(name) => {
match orig_module.children.find(name) {
none => {
debug!{"!!! (with scope) didn't find `%s` in `%s`",
debug!("!!! (with scope) didn't find `%s` in `%s`",
self.session.str_of(name),
self.module_to_str(orig_module)};
self.module_to_str(orig_module));
}
some(name_bindings) => {
match (*name_bindings).get_module_if_available() {
none => {
debug!{"!!! (with scope) didn't find module \
debug!("!!! (with scope) didn't find module \
for `%s` in `%s`",
self.session.str_of(name),
self.module_to_str(orig_module)};
self.module_to_str(orig_module));
}
some(module_) => {
self.current_module = module_;
@ -2794,7 +2794,7 @@ struct Resolver {
// XXX: This shouldn't be unsafe!
fn resolve_crate() unsafe {
debug!{"(resolving crate) starting"};
debug!("(resolving crate) starting");
// XXX: This is awful!
let this = ptr::addr_of(self);
@ -2816,8 +2816,8 @@ struct Resolver {
}
fn resolve_item(item: @item, visitor: ResolveVisitor) {
debug!{"(resolving item) resolving %s",
self.session.str_of(item.ident)};
debug!("(resolving item) resolving %s",
self.session.str_of(item.ident));
// Items with the !resolve_unexported attribute are X-ray contexts.
// This is used to allow the test runner to run unexported tests.
@ -2874,8 +2874,8 @@ struct Resolver {
// definition of the trait into the definition
// map.
debug!{"(resolving trait) found trait def: \
%?", def};
debug!("(resolving trait) found trait def: \
%?", def);
self.record_def(trt.ref_id, def);
}
@ -3010,8 +3010,8 @@ struct Resolver {
for (*type_parameters).eachi |index, type_parameter| {
let name = type_parameter.ident;
debug!{"with_type_parameter_rib: %d %d", node_id,
type_parameter.id};
debug!("with_type_parameter_rib: %d %d", node_id,
type_parameter.id);
let def_like = dl_def(def_ty_param
(local_def(type_parameter.id),
index + initial_index));
@ -3126,8 +3126,8 @@ struct Resolver {
self.resolve_type(argument.ty, visitor);
debug!{"(resolving function) recorded argument `%s`",
self.session.str_of(name)};
debug!("(resolving function) recorded argument `%s`",
self.session.str_of(name));
}
self.resolve_type(declaration.output, visitor);
@ -3137,7 +3137,7 @@ struct Resolver {
// Resolve the function body.
self.resolve_block(block, visitor);
debug!{"(resolving function) leaving function"};
debug!("(resolving function) leaving function");
}
(*self.label_ribs).pop();
@ -3193,7 +3193,7 @@ struct Resolver {
// definition of the trait into the definition
// map.
debug!{"(resolving class) found trait def: %?", def};
debug!("(resolving class) found trait def: %?", def);
self.record_def(trt.ref_id, def);
@ -3359,7 +3359,7 @@ struct Resolver {
visitor: ResolveVisitor) {
// Write the implementations in scope into the module metadata.
debug!{"(resolving module) resolving module ID %d", id};
debug!("(resolving module) resolving module ID %d", id);
visit_mod(module_, span, id, (), visitor);
}
@ -3411,17 +3411,17 @@ struct Resolver {
none => {
self.session.span_err(
p.span,
fmt!{"variable `%s` from pattern #1 is \
fmt!("variable `%s` from pattern #1 is \
not bound in pattern #%u",
self.session.str_of(key), i + 1});
self.session.str_of(key), i + 1));
}
some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode {
self.session.span_err(
binding_i.span,
fmt!{"variable `%s` is bound with different \
fmt!("variable `%s` is bound with different \
mode in pattern #%u than in pattern #1",
self.session.str_of(key), i + 1});
self.session.str_of(key), i + 1));
}
}
}
@ -3431,9 +3431,9 @@ struct Resolver {
if !map_0.contains_key(key) {
self.session.span_err(
binding.span,
fmt!{"variable `%s` from pattern #%u is \
fmt!("variable `%s` from pattern #%u is \
not bound in pattern #1",
self.session.str_of(key), i + 1});
self.session.str_of(key), i + 1));
}
}
}
@ -3459,7 +3459,7 @@ struct Resolver {
}
fn resolve_block(block: blk, visitor: ResolveVisitor) {
debug!{"(resolving block) entering block"};
debug!("(resolving block) entering block");
(*self.value_ribs).push(@Rib(NormalRibKind));
// Move down in the graph, if there's an anonymous module rooted here.
@ -3467,8 +3467,8 @@ struct Resolver {
match self.current_module.anonymous_children.find(block.node.id) {
none => { /* Nothing to do. */ }
some(anonymous_module) => {
debug!{"(resolving block) found anonymous module, moving \
down"};
debug!("(resolving block) found anonymous module, moving \
down");
self.current_module = anonymous_module;
}
}
@ -3480,7 +3480,7 @@ struct Resolver {
self.current_module = orig_module;
(*self.value_ribs).pop();
debug!{"(resolving block) leaving block"};
debug!("(resolving block) leaving block");
}
fn resolve_type(ty: @ty, visitor: ResolveVisitor) {
@ -3495,8 +3495,8 @@ struct Resolver {
let mut result_def;
match self.resolve_path(path, TypeNS, true, visitor) {
some(def) => {
debug!{"(resolving type) resolved `%s` to type",
self.session.str_of(path.idents.last())};
debug!("(resolving type) resolved `%s` to type",
self.session.str_of(path.idents.last()));
result_def = some(def);
}
none => {
@ -3532,19 +3532,19 @@ struct Resolver {
match copy result_def {
some(def) => {
// Write the result into the def map.
debug!{"(resolving type) writing resolution for `%s` \
debug!("(resolving type) writing resolution for `%s` \
(id %d)",
connect(path.idents.map(
|x| self.session.str_of(x)), ~"::"),
path_id};
path_id);
self.record_def(path_id, def);
}
none => {
self.session.span_err
(ty.span, fmt!{"use of undeclared type name `%s`",
(ty.span, fmt!("use of undeclared type name `%s`",
connect(path.idents.map(
|x| self.session.str_of(x)),
~"::")});
~"::")));
}
}
}
@ -3582,19 +3582,19 @@ struct Resolver {
match self.resolve_enum_variant_or_const(atom) {
FoundEnumVariant(def) if mode == RefutableMode => {
debug!{"(resolving pattern) resolving `%s` to \
debug!("(resolving pattern) resolving `%s` to \
enum variant",
self.session.str_of(atom)};
self.session.str_of(atom));
self.record_def(pattern.id, def);
}
FoundEnumVariant(_) => {
self.session.span_err(pattern.span,
fmt!{"declaration of `%s` \
fmt!("declaration of `%s` \
shadows an enum \
that's in scope",
self.session
.str_of(atom)});
.str_of(atom)));
}
FoundConst => {
self.session.span_err(pattern.span,
@ -3603,8 +3603,8 @@ struct Resolver {
in scope");
}
EnumVariantOrConstNotFound => {
debug!{"(resolving pattern) binding `%s`",
self.session.str_of(atom)};
debug!("(resolving pattern) binding `%s`",
self.session.str_of(atom));
let is_mutable = mutability == Mutable;
@ -3647,10 +3647,10 @@ struct Resolver {
// in the same disjunct, which is an
// error
self.session.span_err(pattern.span,
fmt!{"Identifier %s is bound more \
fmt!("Identifier %s is bound more \
than once in the same pattern",
path_to_str(path, self.session
.intr())});
.intr())));
}
// Not bound in the same pattern: do nothing
}
@ -3678,9 +3678,9 @@ struct Resolver {
some(_) => {
self.session.span_err(
path.span,
fmt!{"not an enum variant: %s",
fmt!("not an enum variant: %s",
self.session.str_of(
path.idents.last())});
path.idents.last())));
}
none => {
self.session.span_err(path.span,
@ -3835,9 +3835,9 @@ struct Resolver {
-> NameDefinition {
if xray == NoXray && !self.name_is_exported(containing_module, name) {
debug!{"(resolving definition of name in module) name `%s` is \
debug!("(resolving definition of name in module) name `%s` is \
unexported",
self.session.str_of(name)};
self.session.str_of(name));
return NoNameDefinition;
}
@ -3918,9 +3918,9 @@ struct Resolver {
Failed => {
self.session.span_err(path.span,
fmt!{"use of undeclared module `%s`",
fmt!("use of undeclared module `%s`",
self.atoms_to_str(
(*module_path_atoms).get())});
(*module_path_atoms).get())));
return none;
}
@ -3942,9 +3942,9 @@ struct Resolver {
// We failed to resolve the name. Report an error.
self.session.span_err(
path.span,
fmt!{"unresolved name: %s::%s",
fmt!("unresolved name: %s::%s",
self.atoms_to_str((*module_path_atoms).get()),
self.session.str_of(name)});
self.session.str_of(name)));
return none;
}
ChildNameDefinition(def) | ImportNameDefinition(def) => {
@ -3971,9 +3971,9 @@ struct Resolver {
Failed => {
self.session.span_err(path.span,
fmt!{"use of undeclared module `::%s`",
fmt!("use of undeclared module `::%s`",
self.atoms_to_str
((*module_path_atoms).get())});
((*module_path_atoms).get())));
return none;
}
@ -3995,9 +3995,9 @@ struct Resolver {
// We failed to resolve the name. Report an error.
self.session.span_err(
path.span,
fmt!{"unresolved name: %s::%s", self.atoms_to_str(
fmt!("unresolved name: %s::%s", self.atoms_to_str(
(*module_path_atoms).get()),
self.session.str_of(name)});
self.session.str_of(name)));
return none;
}
ChildNameDefinition(def) | ImportNameDefinition(def) => {
@ -4028,10 +4028,10 @@ struct Resolver {
match copy search_result {
some(dl_def(def)) => {
debug!{"(resolving path in local ribs) resolved `%s` to \
debug!("(resolving path in local ribs) resolved `%s` to \
local: %?",
self.session.str_of(ident),
def};
def);
return some(def);
}
some(dl_field) | some(dl_impl(_)) | none => {
@ -4056,9 +4056,9 @@ struct Resolver {
bindings with no def for that namespace?!";
}
some(def) => {
debug!{"(resolving item path in lexical scope) \
debug!("(resolving item path in lexical scope) \
resolved `%s` to item",
self.session.str_of(ident)};
self.session.str_of(ident));
return some(def.def);
}
}
@ -4090,18 +4090,18 @@ struct Resolver {
match self.resolve_path(path, ValueNS, true, visitor) {
some(def) => {
// Write the result into the def map.
debug!{"(resolving expr) resolved `%s`",
debug!("(resolving expr) resolved `%s`",
connect(path.idents.map(
|x| self.session.str_of(x)), ~"::")};
|x| self.session.str_of(x)), ~"::"));
self.record_def(expr.id, def);
}
none => {
self.session.span_err(
expr.span,
fmt!{"unresolved name: %s",
fmt!("unresolved name: %s",
connect(path.idents.map(
|x| self.session.str_of(x)),
~"::")});
~"::")));
}
}
@ -4149,10 +4149,10 @@ struct Resolver {
_ => {
self.session.span_err(
path.span,
fmt!{"`%s` does not name a structure",
fmt!("`%s` does not name a structure",
connect(path.idents.map(
|x| self.session.str_of(x)),
~"::")});
~"::")));
}
}
@ -4340,11 +4340,11 @@ struct Resolver {
match self.trait_info.find(trait_def_id) {
some(trait_info) if trait_info.contains_key(name) => {
debug!{"(adding trait info if containing method) found trait \
debug!("(adding trait info if containing method) found trait \
%d:%d for method '%s'",
trait_def_id.crate,
trait_def_id.node,
self.session.str_of(name)};
self.session.str_of(name));
(*found_traits).push(trait_def_id);
}
some(_) | none => {
@ -4360,7 +4360,7 @@ struct Resolver {
}
fn record_def(node_id: node_id, def: def) {
debug!{"(recording def) recording %? for %?", def, node_id};
debug!("(recording def) recording %? for %?", def, node_id);
self.def_map.insert(node_id, def);
}
@ -4393,9 +4393,9 @@ struct Resolver {
}
some(_) => {
// Bail out.
debug!{"(checking for unused imports in module subtree) not \
debug!("(checking for unused imports in module subtree) not \
checking for unused imports for `%s`",
self.module_to_str(module_)};
self.module_to_str(module_));
return;
}
}
@ -4491,14 +4491,14 @@ struct Resolver {
}
fn dump_module(module_: @Module) {
debug!{"Dump of module `%s`:", self.module_to_str(module_)};
debug!("Dump of module `%s`:", self.module_to_str(module_));
debug!{"Children:"};
debug!("Children:");
for module_.children.each |name, _child| {
debug!{"* %s", self.session.str_of(name)};
debug!("* %s", self.session.str_of(name));
}
debug!{"Import resolutions:"};
debug!("Import resolutions:");
for module_.import_resolutions.each |name, import_resolution| {
let mut module_repr;
match (*import_resolution).target_for_namespace(ModuleNS) {
@ -4527,9 +4527,9 @@ struct Resolver {
}
}
debug!{"* %s:%s%s%s",
debug!("* %s:%s%s%s",
self.session.str_of(name),
module_repr, value_repr, type_repr};
module_repr, value_repr, type_repr);
}
}
}

View file

@ -67,8 +67,8 @@ enum dest {
fn dest_str(ccx: @crate_ctxt, d: dest) -> ~str {
match d {
by_val(v) => fmt!{"by_val(%s)", val_str(ccx.tn, *v)},
save_in(v) => fmt!{"save_in(%s)", val_str(ccx.tn, v)},
by_val(v) => fmt!("by_val(%s)", val_str(ccx.tn, *v)),
save_in(v) => fmt!("save_in(%s)", val_str(ccx.tn, v)),
ignore => ~"ignore"
}
}
@ -100,7 +100,7 @@ trait get_insn_ctxt {
impl @crate_ctxt: get_insn_ctxt {
fn insn_ctxt(s: &str) -> icx_popper {
debug!{"new insn_ctxt: %s", s};
debug!("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() {
vec::push(*self.stats.llvm_insn_ctxt, str::from_slice(s));
}
@ -504,7 +504,7 @@ fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
mangle_internal_name_by_type_only(ccx, t, ~"tydesc")
} else { mangle_internal_name_by_seq(ccx, ~"tydesc") };
note_unique_llvm_symbol(ccx, name);
log(debug, fmt!{"+++ declare_tydesc %s %s", ty_to_str(ccx.tcx, t), name});
log(debug, fmt!("+++ declare_tydesc %s %s", ty_to_str(ccx.tcx, t), name));
let gvar = str::as_c_str(name, |buf| {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf)
});
@ -1116,52 +1116,52 @@ fn lazily_emit_tydesc_glue(ccx: @crate_ctxt, field: uint,
match ti.take_glue {
some(_) => (),
none => {
debug!{"+++ lazily_emit_tydesc_glue TAKE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("+++ lazily_emit_tydesc_glue TAKE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"take");
ti.take_glue = some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_take_glue, ~"take");
debug!{"--- lazily_emit_tydesc_glue TAKE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("--- lazily_emit_tydesc_glue TAKE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
} else if field == abi::tydesc_field_drop_glue {
match ti.drop_glue {
some(_) => (),
none => {
debug!{"+++ lazily_emit_tydesc_glue DROP %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("+++ lazily_emit_tydesc_glue DROP %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"drop");
ti.drop_glue = some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_drop_glue, ~"drop");
debug!{"--- lazily_emit_tydesc_glue DROP %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("--- lazily_emit_tydesc_glue DROP %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
} else if field == abi::tydesc_field_free_glue {
match ti.free_glue {
some(_) => (),
none => {
debug!{"+++ lazily_emit_tydesc_glue FREE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("+++ lazily_emit_tydesc_glue FREE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"free");
ti.free_glue = some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_free_glue, ~"free");
debug!{"--- lazily_emit_tydesc_glue FREE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("--- lazily_emit_tydesc_glue FREE %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
} else if field == abi::tydesc_field_visit_glue {
match ti.visit_glue {
some(_) => (),
none => {
debug!{"+++ lazily_emit_tydesc_glue VISIT %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("+++ lazily_emit_tydesc_glue VISIT %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"visit");
ti.visit_glue = some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_visit_glue, ~"visit");
debug!{"--- lazily_emit_tydesc_glue VISIT %s",
ppaux::ty_to_str(ccx.tcx, ti.ty)};
debug!("--- lazily_emit_tydesc_glue VISIT %s",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
}
@ -1722,7 +1722,7 @@ fn trans_eager_binop(cx: block, span: span, op: ast::binop, lhs: ValueRef,
fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop,
dst: @ast::expr, src: @ast::expr) -> block {
debug!{"%s", expr_to_str(ex, bcx.tcx().sess.parse_sess.interner)};
debug!("%s", expr_to_str(ex, bcx.tcx().sess.parse_sess.interner));
let _icx = bcx.insn_ctxt("trans_assign_op");
let t = expr_ty(bcx, src);
let lhs_res = trans_lval(bcx, dst);
@ -1732,9 +1732,9 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop,
match bcx.ccx().maps.method_map.find(ex.id) {
some(origin) => {
let bcx = lhs_res.bcx;
debug!{"user-defined method callee_id: %s",
debug!("user-defined method callee_id: %s",
ast_map::node_id_to_str(bcx.tcx().items, ex.callee_id,
bcx.sess().parse_sess.interner)};
bcx.sess().parse_sess.interner));
let fty = node_id_type(bcx, ex.callee_id);
let dty = expr_ty(bcx, dst);
@ -1770,7 +1770,7 @@ fn root_value(bcx: block, val: ValueRef, ty: ty::t,
if bcx.sess().trace() {
trans_trace(
bcx, none,
fmt!{"preserving until end of scope %d", scope_id});
fmt!("preserving until end of scope %d", scope_id));
}
let root_loc = alloca_zeroed(bcx, type_of(bcx.ccx(), ty));
@ -1789,9 +1789,9 @@ fn autoderef(cx: block, e_id: ast::node_id,
let ccx = cx.ccx();
let mut derefs = 0u;
while derefs < max {
debug!{"autoderef(e_id=%d, v1=%s, t1=%s, derefs=%u)",
debug!("autoderef(e_id=%d, v1=%s, t1=%s, derefs=%u)",
e_id, val_str(ccx.tn, v1), ppaux::ty_to_str(ccx.tcx, t1),
derefs};
derefs);
// root the autoderef'd value, if necessary:
derefs += 1u;
@ -2140,8 +2140,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
match ccx.monomorphized.find(hash_id) {
some(val) => {
debug!{"leaving monomorphic fn %s",
ty::item_path_str(ccx.tcx, fn_id)};
debug!("leaving monomorphic fn %s",
ty::item_path_str(ccx.tcx, fn_id));
return {val: val, must_cast: must_cast};
}
none => ()
@ -2151,9 +2151,9 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
let mut llitem_ty = tpt.ty;
let map_node = session::expect(ccx.sess, ccx.tcx.items.find(fn_id.node),
|| fmt!{"While monomorphizing %?, couldn't find it in the item map \
|| fmt!("While monomorphizing %?, couldn't find it in the item map \
(may have attempted to monomorphize an item defined in a different \
crate?)", fn_id});
crate?)", fn_id));
// Get the path so that we can create a symbol
let (pt, name, span) = match map_node {
ast_map::node_item(i, pt) => (pt, i.ident, i.span),
@ -2287,7 +2287,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
};
ccx.monomorphizing.insert(fn_id, depth);
debug!{"leaving monomorphic fn %s", ty::item_path_str(ccx.tcx, fn_id)};
debug!("leaving monomorphic fn %s", ty::item_path_str(ccx.tcx, fn_id));
{val: lldecl, must_cast: must_cast}
}
@ -2297,8 +2297,8 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id)
match ccx.external.find(fn_id) {
some(some(node_id)) => {
// Already inline
debug!{"maybe_instantiate_inline(%s): already inline as node id %d",
ty::item_path_str(ccx.tcx, fn_id), node_id};
debug!("maybe_instantiate_inline(%s): already inline as node id %d",
ty::item_path_str(ccx.tcx, fn_id), node_id);
local_def(node_id)
}
some(none) => fn_id, // Not inlinable
@ -2487,8 +2487,8 @@ fn trans_local_var(cx: block, def: ast::def) -> local_var_result {
return {val: slf, kind: lv_owned};
}
_ => {
cx.sess().unimpl(fmt!{"unsupported def type in trans_local_var: %?",
def});
cx.sess().unimpl(fmt!("unsupported def type in trans_local_var: %?",
def));
}
}
}
@ -2576,7 +2576,7 @@ fn trans_rec_field_inner(bcx: block, val: ValueRef, ty: ty::t,
let ix = field_idx_strict(bcx.tcx(), sp, field, fields);
debug!{"val = %s ix = %u", bcx.val_str(val), ix};
debug!("val = %s ix = %u", bcx.val_str(val), ix);
/* self is a class with a dtor, which means we
have to select out the object itself
@ -2628,8 +2628,8 @@ fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr,
len = Sub(bcx, len, C_uint(bcx.ccx(), 1u));
}
debug!{"trans_index: base %s", val_str(bcx.ccx().tn, base)};
debug!{"trans_index: len %s", val_str(bcx.ccx().tn, len)};
debug!("trans_index: base %s", val_str(bcx.ccx().tn, base));
debug!("trans_index: len %s", val_str(bcx.ccx().tn, len));
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, scaled_ix, len);
let bcx = do with_cond(bcx, bounds_check) |bcx| {
@ -2687,8 +2687,8 @@ fn trans_lval(cx: block, e: @ast::expr) -> lval_result {
let lv = unrooted(cx, e);
if !cx.sess().no_asm_comments() {
add_comment(cx, fmt!{"preserving until end of scope %d",
scope_id});
add_comment(cx, fmt!("preserving until end of scope %d",
scope_id));
}
let _icx = lv.bcx.insn_ctxt("root_value_lval");
@ -2749,7 +2749,7 @@ fn trans_lval(cx: block, e: @ast::expr) -> lval_result {
* wrong address space and thus be the wrong type.
*/
fn non_gc_box_cast(cx: block, val: ValueRef) -> ValueRef {
debug!{"non_gc_box_cast"};
debug!("non_gc_box_cast");
add_comment(cx, ~"non_gc_box_cast");
assert(llvm::LLVMGetPointerAddressSpace(val_ty(val)) as uint == 1u);
let non_gc_t = T_ptr(llvm::LLVMGetElementType(val_ty(val)));
@ -2904,7 +2904,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
-> result {
let _icx = cx.insn_ctxt("trans_arg_expr");
let ccx = cx.ccx();
debug!{"+++ trans_arg_expr on %s", expr_to_str(e, ccx.sess.intr())};
debug!("+++ trans_arg_expr on %s", expr_to_str(e, ccx.sess.intr()));
let e_ty = expr_ty(cx, e);
let is_bot = ty::type_is_bot(e_ty);
@ -2925,7 +2925,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
// auto-deref value as required (this only applies to method
// call receivers) of method
debug!{" pre-deref value: %s", val_str(lv.bcx.ccx().tn, lv.val)};
debug!(" pre-deref value: %s", val_str(lv.bcx.ccx().tn, lv.val));
let {lv, e_ty} = if derefs == 0u {
{lv: lv, e_ty: e_ty}
} else {
@ -2937,11 +2937,11 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
};
// borrow value (convert from @T to &T and so forth)
debug!{" pre-adaptation value: %s", val_str(lv.bcx.ccx().tn, lv.val)};
debug!(" pre-adaptation value: %s", val_str(lv.bcx.ccx().tn, lv.val));
let {lv, ty: e_ty} = adapt_borrowed_value(lv, e, e_ty);
let mut bcx = lv.bcx;
let mut val = lv.val;
debug!{" adapted value: %s", val_str(bcx.ccx().tn, val)};
debug!(" adapted value: %s", val_str(bcx.ccx().tn, val));
// finally, deal with the various modes
let arg_mode = ty::resolved_mode(ccx.tcx, arg.mode);
@ -2993,11 +2993,11 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
}
if !is_bot && arg.ty != e_ty || ty::type_has_params(arg.ty) {
debug!{" casting from %s", val_str(bcx.ccx().tn, val)};
debug!(" casting from %s", val_str(bcx.ccx().tn, val));
val = PointerCast(bcx, val, lldestty);
}
debug!{"--- trans_arg_expr passing %s", val_str(bcx.ccx().tn, val)};
debug!("--- trans_arg_expr passing %s", val_str(bcx.ccx().tn, val));
return rslt(bcx, val);
}
@ -3038,9 +3038,9 @@ fn adapt_borrowed_value(lv: lval_result,
let (base, len) = tvec::get_base_and_len(bcx, val, e_ty);
let p = alloca(bcx, T_struct(~[T_ptr(llunit_ty), ccx.int_type]));
debug!{"adapt_borrowed_value: adapting %s to %s",
debug!("adapt_borrowed_value: adapting %s to %s",
val_str(bcx.ccx().tn, val),
val_str(bcx.ccx().tn, p)};
val_str(bcx.ccx().tn, p));
Store(bcx, base, GEPi(bcx, p, ~[0u, abi::slice_elt_base]));
Store(bcx, len, GEPi(bcx, p, ~[0u, abi::slice_elt_len]));
@ -3683,9 +3683,9 @@ fn lval_result_to_result(lv: lval_result, ty: ty::t) -> result {
fn add_root_cleanup(bcx: block, scope_id: ast::node_id,
root_loc: ValueRef, ty: ty::t) {
debug!{"add_root_cleanup(bcx=%s, scope_id=%d, root_loc=%s, ty=%s)",
debug!("add_root_cleanup(bcx=%s, scope_id=%d, root_loc=%s, ty=%s)",
bcx.to_str(), scope_id, val_str(bcx.ccx().tn, root_loc),
ppaux::ty_to_str(bcx.ccx().tcx, ty)};
ppaux::ty_to_str(bcx.ccx().tcx, ty));
let bcx_scope = find_bcx_for_scope(bcx, scope_id);
add_clean_temp_mem(bcx_scope, root_loc, ty);
@ -3700,7 +3700,7 @@ fn add_root_cleanup(bcx: block, scope_id: ast::node_id,
_ => {
match bcx_sid.parent {
none => bcx.tcx().sess.bug(
fmt!{"no enclosing scope with id %d", scope_id}),
fmt!("no enclosing scope with id %d", scope_id)),
some(bcx_par) => bcx_par
}
}
@ -3724,16 +3724,16 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
return match bcx.ccx().maps.root_map.find({id:e.id, derefs:0u}) {
none => unrooted(bcx, e, dest),
some(scope_id) => {
debug!{"expression %d found in root map with scope %d",
e.id, scope_id};
debug!("expression %d found in root map with scope %d",
e.id, scope_id);
let ty = expr_ty(bcx, e);
let root_loc = alloca_zeroed(bcx, type_of(bcx.ccx(), ty));
let bcx = unrooted(bcx, e, save_in(root_loc));
if !bcx.sess().no_asm_comments() {
add_comment(bcx, fmt!{"preserving until end of scope %d",
scope_id});
add_comment(bcx, fmt!("preserving until end of scope %d",
scope_id));
}
let _icx = bcx.insn_ctxt("root_value_expr");
@ -3812,9 +3812,9 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
ast::expr_fn_block(decl, body, cap_clause) => {
match check ty::get(expr_ty(bcx, e)).struct {
ty::ty_fn({proto, _}) => {
debug!{"translating fn_block %s with type %s",
debug!("translating fn_block %s with type %s",
expr_to_str(e, tcx.sess.intr()),
ppaux::ty_to_str(tcx, expr_ty(bcx, e))};
ppaux::ty_to_str(tcx, expr_ty(bcx, e)));
return closure::trans_expr_fn(bcx, proto, decl, body,
e.id, cap_clause, none, dest);
}
@ -3945,8 +3945,8 @@ fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block {
let ty = expr_ty(bcx, e);
let lv = trans_lval(bcx, e);
let last_use = (lv.kind == lv_owned && last_use_map.contains_key(e.id));
debug!{"is last use (%s) = %b, %d", expr_to_str(e, bcx.ccx().sess.intr()),
last_use, lv.kind as int};
debug!("is last use (%s) = %b, %d", expr_to_str(e, bcx.ccx().sess.intr()),
last_use, lv.kind as int);
lval_result_to_dps(lv, ty, last_use, dest)
}
@ -4297,7 +4297,7 @@ fn init_local(bcx: block, local: @ast::local) -> block {
fn trans_stmt(cx: block, s: ast::stmt) -> block {
let _icx = cx.insn_ctxt("trans_stmt");
debug!{"trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr())};
debug!("trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr()));
if !cx.sess().no_asm_comments() {
add_span_comment(cx, s.span, stmt_to_str(s, cx.ccx().sess.intr()));
@ -4434,12 +4434,12 @@ fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>,
let mut cur = bcx, bcx = bcx;
let is_lpad = leave == none;
loop {
debug!{"cleanup_and_leave: leaving %s", cur.to_str()};
debug!("cleanup_and_leave: leaving %s", cur.to_str());
if bcx.sess().trace() {
trans_trace(
bcx, none,
fmt!{"cleanup_and_leave(%s)", cur.to_str()});
fmt!("cleanup_and_leave(%s)", cur.to_str()));
}
match cur.kind {
@ -5133,9 +5133,9 @@ fn register_fn_fuller(ccx: @crate_ctxt, sp: span, path: path,
let llfn: ValueRef = decl_fn(ccx.llmod, ps, cc, llfty);
ccx.item_symbols.insert(node_id, ps);
debug!{"register_fn_fuller created fn %s for item %d with path %s",
debug!("register_fn_fuller created fn %s for item %d with path %s",
val_str(ccx.tn, llfn), node_id,
ast_map::path_to_str(path, ccx.sess.parse_sess.interner)};
ast_map::path_to_str(path, ccx.sess.parse_sess.interner));
let is_main = is_main_name(path) && !ccx.sess.building_library;
if is_main { create_main_wrapper(ccx, sp, llfn, node_type); }
@ -5276,8 +5276,8 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id,
mono_ty)
}
none => {
ccx.sess.bug(fmt!{"get_dtor_symbol: not monomorphizing and \
couldn't find a symbol for dtor %?", path});
ccx.sess.bug(fmt!("get_dtor_symbol: not monomorphizing and \
couldn't find a symbol for dtor %?", path));
}
}
}
@ -5285,7 +5285,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id,
}
fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef {
debug!{"get_item_val(id=`%?`)", id};
debug!("get_item_val(id=`%?`)", id);
let tcx = ccx.tcx;
match ccx.item_vals.find(id) {
some(v) => v,
@ -5316,7 +5316,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef {
}
}
ast_map::node_trait_method(trait_method, _, pth) => {
debug!{"get_item_val(): processing a node_trait_method"};
debug!("get_item_val(): processing a node_trait_method");
match *trait_method {
ast::required(_) => {
ccx.sess.bug(~"unexpected variant: required trait method in \
@ -5527,7 +5527,7 @@ fn trap(bcx: block) {
fn push_rtcall(ccx: @crate_ctxt, name: ~str, did: ast::def_id) {
if ccx.rtcalls.contains_key(name) {
fail fmt!{"multiple definitions for runtime call %s", name};
fail fmt!("multiple definitions for runtime call %s", name);
}
ccx.rtcalls.insert(name, did);
}
@ -5597,7 +5597,7 @@ fn gather_rtcalls(ccx: @crate_ctxt, crate: @ast::crate) {
~[~"exchange_free", ~"exchange_malloc", ~"fail", ~"free", ~"malloc"];
for vec::each(expected_rtcalls) |name| {
if !ccx.rtcalls.contains_key(name) {
fail fmt!{"no definition for runtime call %s", name};
fail fmt!("no definition for runtime call %s", name);
}
}
}
@ -5867,25 +5867,25 @@ fn trans_crate(sess: session::session,
write_metadata(ccx, crate);
if ccx.sess.trans_stats() {
io::println(~"--- trans stats ---");
io::println(fmt!{"n_static_tydescs: %u",
ccx.stats.n_static_tydescs});
io::println(fmt!{"n_glues_created: %u",
ccx.stats.n_glues_created});
io::println(fmt!{"n_null_glues: %u", ccx.stats.n_null_glues});
io::println(fmt!{"n_real_glues: %u", ccx.stats.n_real_glues});
io::println(fmt!("n_static_tydescs: %u",
ccx.stats.n_static_tydescs));
io::println(fmt!("n_glues_created: %u",
ccx.stats.n_glues_created));
io::println(fmt!("n_null_glues: %u", ccx.stats.n_null_glues));
io::println(fmt!("n_real_glues: %u", ccx.stats.n_real_glues));
// FIXME (#2280): this temporary shouldn't be
// necessary, but seems to be, for borrowing.
let times = copy *ccx.stats.fn_times;
for vec::each(times) |timing| {
io::println(fmt!{"time: %s took %d ms", timing.ident,
timing.time});
io::println(fmt!("time: %s took %d ms", timing.ident,
timing.time));
}
}
if ccx.sess.count_llvm_insns() {
for ccx.stats.llvm_insns.each |k, v| {
io::println(fmt!{"%-7u %s", v, k});
io::println(fmt!("%-7u %s", v, k));
}
}
return (llmod, link_meta);

View file

@ -142,10 +142,10 @@ fn Invoke(cx: block, Fn: ValueRef, Args: ~[ValueRef],
if cx.unreachable { return; }
assert (!cx.terminated);
cx.terminated = true;
debug!{"Invoke(%s with arguments (%s))",
debug!("Invoke(%s with arguments (%s))",
val_str(cx.ccx().tn, Fn),
str::connect(vec::map(Args, |a| val_str(cx.ccx().tn, a)),
~", ")};
~", "));
unsafe {
count_insn(cx, "invoke");
llvm::LLVMBuildInvoke(B(cx), Fn, vec::unsafe::to_ptr(Args),
@ -413,9 +413,9 @@ fn Load(cx: block, PointerVal: ValueRef) -> ValueRef {
fn Store(cx: block, Val: ValueRef, Ptr: ValueRef) {
if cx.unreachable { return; }
debug!{"Store %s -> %s",
debug!("Store %s -> %s",
val_str(cx.ccx().tn, Val),
val_str(cx.ccx().tn, Ptr)};
val_str(cx.ccx().tn, Ptr));
count_insn(cx, "store");
llvm::LLVMBuildStore(B(cx), Val, Ptr);
}
@ -673,9 +673,9 @@ fn Call(cx: block, Fn: ValueRef, Args: ~[ValueRef]) -> ValueRef {
unsafe {
count_insn(cx, "call");
debug!{"Call(Fn=%s, Args=%?)",
debug!("Call(Fn=%s, Args=%?)",
val_str(cx.ccx().tn, Fn),
Args.map(|arg| val_str(cx.ccx().tn, arg))};
Args.map(|arg| val_str(cx.ccx().tn, arg)));
return llvm::LLVMBuildCall(B(cx), Fn, vec::unsafe::to_ptr(Args),
Args.len() as c_uint, noname());

View file

@ -100,12 +100,12 @@ enum environment_value {
fn ev_to_str(ccx: @crate_ctxt, ev: environment_value) -> ~str {
match ev {
env_copy(v, t, lk) => fmt!{"copy(%s,%s)", val_str(ccx.tn, v),
ty_to_str(ccx.tcx, t)},
env_move(v, t, lk) => fmt!{"move(%s,%s)", val_str(ccx.tn, v),
ty_to_str(ccx.tcx, t)},
env_ref(v, t, lk) => fmt!{"ref(%s,%s)", val_str(ccx.tn, v),
ty_to_str(ccx.tcx, t)}
env_copy(v, t, lk) => fmt!("copy(%s,%s)", val_str(ccx.tn, v),
ty_to_str(ccx.tcx, t)),
env_move(v, t, lk) => fmt!("move(%s,%s)", val_str(ccx.tn, v),
ty_to_str(ccx.tcx, t)),
env_ref(v, t, lk) => fmt!("ref(%s,%s)", val_str(ccx.tn, v),
ty_to_str(ccx.tcx, t))
}
}
@ -129,7 +129,7 @@ fn mk_closure_tys(tcx: ty::ctxt,
});
}
let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!{"cdata_ty=%s", ty_to_str(tcx, cdata_ty)};
debug!("cdata_ty=%s", ty_to_str(tcx, cdata_ty));
return cdata_ty;
}
@ -195,16 +195,16 @@ fn store_environment(bcx: block,
let cboxptr_ty = ty::mk_ptr(tcx, {ty:cbox_ty, mutbl:ast::m_imm});
let llbox = PointerCast(bcx, llbox, type_of(ccx, cboxptr_ty));
debug!{"tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty)};
debug!("tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty));
// Copy expr values into boxed bindings.
let mut bcx = bcx;
do vec::iteri(bound_values) |i, bv| {
debug!{"Copy %s into closure", ev_to_str(ccx, bv)};
debug!("Copy %s into closure", ev_to_str(ccx, bv));
if !ccx.sess.no_asm_comments() {
add_comment(bcx, fmt!{"Copy %s into closure",
ev_to_str(ccx, bv)});
add_comment(bcx, fmt!("Copy %s into closure",
ev_to_str(ccx, bv)));
}
let bound_data = GEPi(bcx, llbox,
@ -225,9 +225,9 @@ fn store_environment(bcx: block,
bcx = move_val(bcx, INIT, bound_data, src, ty);
}
env_ref(val, ty, lv_owned) => {
debug!{"> storing %s into %s",
debug!("> storing %s into %s",
val_str(bcx.ccx().tn, val),
val_str(bcx.ccx().tn, bound_data)};
val_str(bcx.ccx().tn, bound_data));
Store(bcx, val, bound_data);
}
env_ref(val, ty, lv_owned_imm) => {
@ -259,13 +259,13 @@ fn build_closure(bcx0: block,
// Package up the captured upvars
do vec::iter(cap_vars) |cap_var| {
debug!{"Building closure: captured variable %?", cap_var};
debug!("Building closure: captured variable %?", cap_var);
let lv = trans_local_var(bcx, cap_var.def);
let nid = ast_util::def_id_of_def(cap_var.def).node;
debug!{"Node id is %s",
debug!("Node id is %s",
syntax::ast_map::node_id_to_str
(bcx.ccx().tcx.items, nid,
bcx.ccx().sess.parse_sess.interner)};
bcx.ccx().sess.parse_sess.interner));
let mut ty = node_id_type(bcx, nid);
match cap_var.mode {
capture::cap_ref => {

View file

@ -256,9 +256,9 @@ fn cleanup_type(cx: ty::ctxt, ty: ty::t) -> cleantype {
fn add_clean(cx: block, val: ValueRef, ty: ty::t) {
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
debug!{"add_clean(%s, %s, %s)",
debug!("add_clean(%s, %s, %s)",
cx.to_str(), val_str(cx.ccx().tn, val),
ty_to_str(cx.ccx().tcx, ty)};
ty_to_str(cx.ccx().tcx, ty));
let cleanup_type = cleanup_type(cx.tcx(), ty);
do in_scope_cx(cx) |info| {
vec::push(info.cleanups, clean(|a| base::drop_ty(a, val, ty),
@ -268,9 +268,9 @@ fn add_clean(cx: block, val: ValueRef, ty: ty::t) {
}
fn add_clean_temp(cx: block, val: ValueRef, ty: ty::t) {
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
debug!{"add_clean_temp(%s, %s, %s)",
debug!("add_clean_temp(%s, %s, %s)",
cx.to_str(), val_str(cx.ccx().tn, val),
ty_to_str(cx.ccx().tcx, ty)};
ty_to_str(cx.ccx().tcx, ty));
let cleanup_type = cleanup_type(cx.tcx(), ty);
fn do_drop(bcx: block, val: ValueRef, ty: ty::t) ->
block {
@ -288,9 +288,9 @@ fn add_clean_temp(cx: block, val: ValueRef, ty: ty::t) {
}
fn add_clean_temp_mem(cx: block, val: ValueRef, ty: ty::t) {
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
debug!{"add_clean_temp_mem(%s, %s, %s)",
debug!("add_clean_temp_mem(%s, %s, %s)",
cx.to_str(), val_str(cx.ccx().tn, val),
ty_to_str(cx.ccx().tcx, ty)};
ty_to_str(cx.ccx().tcx, ty));
let cleanup_type = cleanup_type(cx.tcx(), ty);
do in_scope_cx(cx) |info| {
vec::push(info.cleanups,
@ -475,8 +475,8 @@ fn in_scope_cx(cx: block, f: fn(scope_info)) {
fn block_parent(cx: block) -> block {
match cx.parent {
some(b) => b,
none => cx.sess().bug(fmt!{"block_parent called on root block %?",
cx})
none => cx.sess().bug(fmt!("block_parent called on root block %?",
cx))
}
}
@ -496,10 +496,10 @@ impl block {
fn to_str() -> ~str {
match self.node_info {
some(node_info) => {
fmt!{"[block %d]", node_info.id}
fmt!("[block %d]", node_info.id)
}
none => {
fmt!{"[block %x]", ptr::addr_of(*self) as uint}
fmt!("[block %x]", ptr::addr_of(*self) as uint)
}
}
}
@ -876,7 +876,7 @@ fn C_cstr(cx: @crate_ctxt, s: ~str) -> ValueRef {
llvm::LLVMConstString(buf, str::len(s) as c_uint, False)
};
let g =
str::as_c_str(fmt!{"str%u", cx.names(~"str")},
str::as_c_str(fmt!("str%u", cx.names(~"str")),
|buf| llvm::LLVMAddGlobal(cx.llmod, val_ty(sc), buf));
llvm::LLVMSetInitializer(g, sc);
llvm::LLVMSetGlobalConstant(g, True);
@ -930,7 +930,7 @@ fn C_bytes(bytes: ~[u8]) -> ValueRef unsafe {
fn C_shape(ccx: @crate_ctxt, bytes: ~[u8]) -> ValueRef {
let llshape = C_bytes(bytes);
let llglobal = str::as_c_str(fmt!{"shape%u", ccx.names(~"shape")}, |buf| {
let llglobal = str::as_c_str(fmt!("shape%u", ccx.names(~"shape")), |buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(llshape), buf)
});
llvm::LLVMSetInitializer(llglobal, llshape);
@ -1025,8 +1025,8 @@ fn field_idx_strict(cx: ty::ctxt, sp: span, ident: ast::ident,
-> uint {
match ty::field_idx(ident, fields) {
none => cx.sess.span_bug(
sp, fmt!{"base expr doesn't appear to \
have a field named %s", cx.sess.str_of(ident)}),
sp, fmt!("base expr doesn't appear to \
have a field named %s", cx.sess.str_of(ident))),
some(i) => i
}
}

View file

@ -179,7 +179,7 @@ fn create_compile_unit(cx: @crate_ctxt)
lli32(DW_LANG_RUST),
llstr(crate_name),
llstr(work_dir),
llstr(env!{"CFG_VERSION"}),
llstr(env!("CFG_VERSION")),
lli1(true), // deprecated: main compile unit
lli1(cx.sess.opts.optimize != session::No),
llstr(~""), // flags (???)
@ -712,7 +712,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata<subprogram_md> {
let cx = fcx.ccx;
let dbg_cx = option::get(cx.dbg_cx);
debug!{"~~"};
debug!("~~");
log(debug, fcx.id);
let sp = option::get(fcx.span);

View file

@ -931,10 +931,10 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::foreign_item,
ast_map::node_expr(e) => e.span
};
ccx.sess.span_fatal(
sp, fmt!{"reinterpret_cast called on types \
sp, fmt!("reinterpret_cast called on types \
with different size: %s (%u) to %s (%u)",
ty_to_str(ccx.tcx, tp_ty), tp_sz,
ty_to_str(ccx.tcx, substs.tys[1]), out_sz});
ty_to_str(ccx.tcx, substs.tys[1]), out_sz));
}
if !ty::type_is_nil(substs.tys[1]) {
let cast = PointerCast(bcx, get_param(decl, first_real_arg),

View file

@ -174,9 +174,9 @@ fn traverse_inline_body(cx: ctx, body: blk) {
some(d) => {
traverse_def_id(cx, def_id_of_def(d));
}
none => cx.tcx.sess.span_bug(e.span, fmt!{"Unbound node \
none => cx.tcx.sess.span_bug(e.span, fmt!("Unbound node \
id %? while traversing %s", e.id,
expr_to_str(e, cx.tcx.sess.intr())})
expr_to_str(e, cx.tcx.sess.intr())))
}
}
expr_field(_, _, _) => {

View file

@ -63,15 +63,15 @@ impl reflector {
let get_lval = |bcx| {
let callee =
impl::trans_trait_callee(bcx, v, mth_ty, mth_idx);
debug!{"calling mth ty %s, lltype %s",
debug!("calling mth ty %s, lltype %s",
ty_to_str(bcx.ccx().tcx, mth_ty),
val_str(bcx.ccx().tn, callee.val)};
val_str(bcx.ccx().tn, callee.val));
callee
};
debug!{"passing %u args:", vec::len(args)};
debug!("passing %u args:", vec::len(args));
let bcx = self.bcx;
for args.eachi |i, a| {
debug!{"arg %u: %s", i, val_str(bcx.ccx().tn, a)};
debug!("arg %u: %s", i, val_str(bcx.ccx().tn, a));
}
let d = empty_dest_cell();
let bcx =
@ -112,8 +112,8 @@ impl reflector {
fn visit_ty(t: ty::t) {
let bcx = self.bcx;
debug!{"reflect::visit_ty %s",
ty_to_str(bcx.ccx().tcx, t)};
debug!("reflect::visit_ty %s",
ty_to_str(bcx.ccx().tcx, t));
match ty::get(t).struct {
ty::ty_bot => self.leaf(~"bot"),

View file

@ -209,9 +209,9 @@ fn trans_evec(bcx: block, elements: evec_elements,
// Store the individual elements.
let mut i = 0u, temp_cleanups = ~[val];
debug!{"trans_evec: v: %s, dataptr: %s",
debug!("trans_evec: v: %s, dataptr: %s",
val_str(ccx.tn, val),
val_str(ccx.tn, dataptr)};
val_str(ccx.tn, dataptr));
match elements {
individual_evec(args) => {
for vec::each(args) |e| {
@ -309,7 +309,7 @@ fn get_base_and_len(cx: block, v: ValueRef, e_ty: ty::t)
(base, len)
}
ty::vstore_uniq | ty::vstore_box => {
debug!{"get_base_and_len: %s", val_str(ccx.tn, v)};
debug!("get_base_and_len: %s", val_str(ccx.tn, v));
let body = tvec::get_bodyptr(cx, v);
(tvec::get_dataptr(cx, body), tvec::get_fill(cx, body))
}
@ -325,13 +325,13 @@ fn trans_estr(bcx: block, s: @~str, vstore: option<ast::vstore>,
let c = match vstore {
some(ast::vstore_fixed(_)) => {
// "hello"/_ => "hello"/5 => ~[i8 x 6] in llvm
debug!{"trans_estr: fixed: %s", *s};
debug!("trans_estr: fixed: %s", *s);
C_postr(*s)
}
some(ast::vstore_slice(_)) | none => {
// "hello" => (*i8, 6u) in llvm
debug!{"trans_estr: slice '%s'", *s};
debug!("trans_estr: slice '%s'", *s);
C_estr_slice(ccx, *s)
}
@ -352,7 +352,7 @@ fn trans_estr(bcx: block, s: @~str, vstore: option<ast::vstore>,
}
};
debug!{"trans_estr: type: %s", val_str(ccx.tn, c)};
debug!("trans_estr: type: %s", val_str(ccx.tn, c));
base::store_in_dest(bcx, c, dest)
}

View file

@ -67,7 +67,7 @@ fn type_of_non_gc_box(cx: @crate_ctxt, t: ty::t) -> TypeRef {
}
fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
debug!{"type_of %?: %?", t, ty::get(t)};
debug!("type_of %?: %?", t, ty::get(t));
// Check the cache.
if cx.lltypes.contains_key(t) { return cx.lltypes.get(t); }
@ -203,7 +203,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
fn fill_type_of_enum(cx: @crate_ctxt, did: ast::def_id, t: ty::t,
llty: TypeRef) {
debug!{"type_of_enum %?: %?", t, ty::get(t)};
debug!("type_of_enum %?: %?", t, ty::get(t));
let lltys = {
let degen = (*ty::enum_variants(cx.tcx, did)).len() == 1u;
@ -227,7 +227,7 @@ fn llvm_type_name(cx: @crate_ctxt, t: ty::t) -> ~str {
ty::ty_enum(did, substs) => (~"enum", did, substs.tps),
ty::ty_class(did, substs) => (~"class", did, substs.tps)
};
return fmt!{
return fmt!(
"%s %s[#%d]",
name,
util::ppaux::parameterized(
@ -236,7 +236,7 @@ fn llvm_type_name(cx: @crate_ctxt, t: ty::t) -> ~str {
none,
tps),
did.crate
};
);
}
fn type_of_dtor(ccx: @crate_ctxt, self_ty: ty::t) -> TypeRef {

View file

@ -61,8 +61,8 @@ fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint)
}
let map_node = match ccx.tcx.items.find(fn_id_loc.node) {
some(x) => x,
none => ccx.sess.bug(fmt!{"type_uses_for: unbound item ID %?",
fn_id_loc})
none => ccx.sess.bug(fmt!("type_uses_for: unbound item ID %?",
fn_id_loc))
};
match map_node {
ast_map::node_item(@{node: item_fn(_, _, body), _}, _) |

View file

@ -41,8 +41,8 @@ fn duplicate(bcx: block, v: ValueRef, t: ty::t) -> result {
let src_box = v;
let src_body = opaque_box_body(bcx, content_ty, src_box);
let src_body = load_if_immediate(bcx, src_body, content_ty);
debug!{"ST: %?", val_str(bcx.ccx().tn, src_body)};
debug!{"DT: %?", val_str(bcx.ccx().tn, dst_body)};
debug!("ST: %?", val_str(bcx.ccx().tn, src_body));
debug!("DT: %?", val_str(bcx.ccx().tn, dst_body));
let bcx = copy_val(bcx, INIT, dst_body, src_body, content_ty);
let src_tydesc_ptr = GEPi(bcx, src_box,

View file

@ -0,0 +1,76 @@
import syntax::ast::*;
import syntax::visit;
import syntax::codemap::span;
import syntax::print::pprust::stmt_to_str;
import aux::{num_constraints, get_fn_info, crate_ctxt, add_node};
import ann::empty_ann;
import pat_util::pat_binding_ids;
// Visitor callback: append this expression's node id to the shared
// accumulator `rs` (a boxed mutable vector threaded through the walk).
fn collect_ids_expr(e: @expr, rs: @mut ~[node_id]) { vec::push(*rs, e.id); }
// Visitor callback: append this block's node id to the shared accumulator.
fn collect_ids_block(b: blk, rs: @mut ~[node_id]) {
vec::push(*rs, b.node.id);
}
// Visitor callback: append a statement's node id to the shared accumulator.
// All three statement forms visible here (decl / expr / semi) carry an id as
// their second field; the match extracts it uniformly.
fn collect_ids_stmt(s: @stmt, rs: @mut ~[node_id]) {
match s.node {
stmt_decl(_, id) | stmt_expr(_, id) | stmt_semi(_, id) {
// Trace which statement ids are being recorded (debug builds only).
debug!("node_id %s", int::str(id));
debug!("%s", stmt_to_str(*s));
vec::push(*rs, id);
}
}
}
// Visitor callback: append the node ids of every binding introduced by a
// `let` pattern. `pat_binding_ids` resolves the pattern via tcx.def_map,
// so a single local can contribute several ids.
fn collect_ids_local(tcx: ty::ctxt, l: @local, rs: @mut ~[node_id]) {
vec::push_all(*rs, pat_binding_ids(tcx.def_map, l.node.pat));
}
// Walk a function body and gather the node ids of all expressions, blocks,
// statements, and local bindings into `rs`. Builds a simple visitor whose
// callbacks close over the shared accumulator, then runs it over `body`.
fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut ~[node_id]) {
let collect_ids =
visit::mk_simple_visitor(@{visit_expr: |a| collect_ids_expr(a, rs),
visit_block: |a| collect_ids_block(a, rs),
visit_stmt: |a| collect_ids_stmt(a, rs),
visit_local: |a|
collect_ids_local(tcx, a, rs)
with *visit::default_simple_visitor()});
collect_ids.visit_block(body, (), collect_ids);
}
// For each collected node id, register an empty annotation of width `len`
// (one slot per constraint — see callers) in the crate context's node table.
fn init_vecs(ccx: crate_ctxt, node_ids: ~[node_id], len: uint) {
for node_ids.each |i| {
// Trace the id -> width mapping being installed.
log(debug, int::str(i) + ~" |-> " + uint::str(len));
add_node(ccx, i, empty_ann(len));
}
}
// Annotate one function: collect every node id in its body, then seed each
// with an empty annotation sized to the function's constraint count.
fn visit_fn(ccx: crate_ctxt, num_constraints: uint, body: blk) {
let node_ids: @mut ~[node_id] = @mut ~[];
node_ids_in_fn(ccx.tcx, body, node_ids);
// Copy out of the @mut box before handing the vector to init_vecs.
let node_id_vec = *node_ids;
init_vecs(ccx, node_id_vec, num_constraints);
}
// visit_fn hook for the crate walk: look up this function's constraint info
// by its node id and annotate its body accordingly. The fn-kind, decl, and
// span parameters are unused here (hence the underscore names).
fn annotate_in_fn(ccx: crate_ctxt, _fk: visit::fn_kind, _decl: fn_decl,
body: blk, _sp: span, id: node_id) {
let f_info = get_fn_info(ccx, id);
visit_fn(ccx, num_constraints(f_info), body);
}
// Entry point: walk the whole crate, running annotate_in_fn on every
// function so each gets empty typestate annotations installed.
fn annotate_crate(ccx: crate_ctxt, crate: crate) {
let do_ann =
visit::mk_simple_visitor(
@{visit_fn: |a,b,c,d,e| annotate_in_fn(ccx, a, b, c, d, e)
with *visit::default_simple_visitor()});
visit::visit_crate(crate, (), do_ann);
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -0,0 +1,990 @@
import option::*;
import pat_util::*;
import syntax::ast::*;
import syntax::ast_util::*;
import syntax::{visit, codemap};
import codemap::span;
import std::map::{hashmap, int_hash};
import syntax::print::pprust::path_to_str;
import tstate::ann::{pre_and_post, pre_and_post_state, empty_ann, prestate,
poststate, precond, postcond,
set_prestate, set_poststate, set_in_poststate_,
extend_prestate, extend_poststate, set_precondition,
set_postcondition, ts_ann,
clear_in_postcond,
clear_in_poststate_};
import driver::session::session;
import dvec::{dvec, extensions};
import tritv::{trit, tfalse, ttrue, dont_care, t};
import syntax::print::pprust::{constr_args_to_str, lit_to_str};
// Used to communicate which operands should be invalidated
// to helper functions
// Kind of operation being analyzed; per the comment above, this tells the
// helper functions which operands should be invalidated.
enum oper_type {
oper_move,        // value is moved out of its source
oper_swap,        // two operands exchange values
oper_assign,      // plain assignment
oper_assign_op,   // compound assignment (e.g. `+=`)
oper_pure,        // no operand is invalidated
}
/* logging funs */
// Renders a def_id as "crate,node" for logging.
fn def_id_to_str(d: def_id) -> ~str {
    return int::str(d.crate) + ~"," + int::str(d.node);
}
// Renders a constraint-argument list as a comma-separated string:
// "*" for the base receiver, the identifier for variables, and the
// literal's text for literals.
fn comma_str(args: ~[@constr_arg_use]) -> ~str {
    let mut rslt = ~"";
    let mut comma = false;
    for args.each |a| {
        if comma { rslt += ~", "; } else { comma = true; }
        match a.node {
          carg_base { rslt += ~"*"; }
          carg_ident(i) { rslt += *i.ident; }
          carg_lit(l) { rslt += lit_to_str(l); }
        }
    }
    return rslt;
}
// Renders a spanned constraint as "path(args) - arising from <span>"
// for diagnostics.
fn constraint_to_str(tcx: ty::ctxt, c: sp_constr) -> ~str {
    return fmt!("%s(%s) - arising from %s",
                path_to_str(c.node.path),
                comma_str(c.node.args),
                codemap::span_to_str(c.span, tcx.sess.codemap));
}
// Renders a tri-valued bit vector as a comma-separated list of the
// constraints whose bits are not dont_care; a definitely-false bit is
// prefixed with "!".
fn tritv_to_str(fcx: fn_ctxt, v: tritv::t) -> ~str {
    let mut s = ~"";
    let mut comma = false;
    for constraints(fcx).each |p| {
        match v.get(p.bit_num) {
          dont_care { }
          tt {
            // first iteration sets `comma` and contributes no separator
            s +=
                if comma { ~", " } else { comma = true; ~"" } +
                if tt == tfalse { ~"!" } else { ~"" } +
                constraint_to_str(fcx.ccx.tcx, p.c);
          }
        }
    }
    return s;
}
// Debug-logs a tri-valued constraint vector.
fn log_tritv(fcx: fn_ctxt, v: tritv::t) {
    log(debug, tritv_to_str(fcx, v));
}
// Returns a printable description of the first constraint that is
// required (`ttrue`) in `expected` but not established in `actual`;
// empty string if none. Used to pick an example for error messages.
fn first_difference_string(fcx: fn_ctxt, expected: tritv::t, actual: tritv::t)
   -> ~str {
    let mut s = ~"";
    for constraints(fcx).each |c| {
        if expected.get(c.bit_num) == ttrue &&
            actual.get(c.bit_num) != ttrue {
            s = constraint_to_str(fcx.ccx.tcx, c.c);
            break;
        }
    }
    return s;
}
// Error-logs a tri-valued constraint vector.
fn log_tritv_err(fcx: fn_ctxt, v: tritv::t) {
    log(error, tritv_to_str(fcx, v));
}
// Renders a vector of 0/1/other as a compact "0"/"1"/"?" string.
fn tos(v: ~[uint]) -> ~str {
    let mut rslt = ~"";
    for v.each |i| {
        if i == 0u {
            rslt += ~"0";
        } else if i == 1u { rslt += ~"1"; } else { rslt += ~"?"; }
    }
    return rslt;
}
fn log_cond(v: ~[uint]) { log(debug, tos(v)); }
fn log_cond_err(v: ~[uint]) { log(error, tos(v)); }
// Debug-logs a precondition/postcondition pair.
fn log_pp(pp: pre_and_post) {
    let p1 = pp.precondition.to_vec();
    let p2 = pp.postcondition.to_vec();
    debug!("pre:");
    log_cond(p1);
    debug!("post:");
    log_cond(p2);
}
// Error-logs a precondition/postcondition pair.
fn log_pp_err(pp: pre_and_post) {
    let p1 = pp.precondition.to_vec();
    let p2 = pp.postcondition.to_vec();
    error!("pre:");
    log_cond_err(p1);
    error!("post:");
    log_cond_err(p2);
}
// Debug-logs a prestate/poststate pair.
fn log_states(pp: pre_and_post_state) {
    let p1 = pp.prestate.to_vec();
    let p2 = pp.poststate.to_vec();
    debug!("prestate:");
    log_cond(p1);
    debug!("poststate:");
    log_cond(p2);
}
// Error-logs a prestate/poststate pair.
fn log_states_err(pp: pre_and_post_state) {
    let p1 = pp.prestate.to_vec();
    let p2 = pp.poststate.to_vec();
    error!("prestate:");
    log_cond_err(p1);
    error!("poststate:");
    log_cond_err(p2);
}
fn print_ident(i: ident) { log(debug, ~" " + *i + ~" "); }
// Debug-logs every ident in the vector; note this pops elements off
// the (by-reference) vector as it recurses, consuming it.
fn print_idents(&idents: ~[ident]) {
    if vec::len::<ident>(idents) == 0u { return; }
    log(debug, ~"an ident: " + *vec::pop::<ident>(idents));
    print_idents(idents);
}
/* data structures */
/**********************************************************************/
/* Two different data structures represent constraints in different
contexts: constraint and norm_constraint.
constraint gets used to record constraints in a table keyed by def_ids. Each
constraint has a single operator but a list of possible argument lists, and
thus represents several constraints at once, one for each possible argument
list.
norm_constraint, in contrast, gets used when handling an instance of a
constraint rather than a definition of a constraint. It has only a single
argument list.
The representation of constraints, where multiple instances of the
same predicate are collapsed into one entry in the table, makes it
easier to look up a specific instance.
Both types are in constrast with the constraint type defined in
syntax::ast, which is for predicate constraints only, and is what
gets generated by the parser. aux and ast share the same type
to represent predicate *arguments* however. This type
(constr_arg_general) is parameterized (see comments in syntax::ast).
Both types store an ident and span, for error-logging purposes.
*/
// One concrete argument list for a predicate, plus the bit this
// instance occupies in the pre/postcondition vectors.
type pred_args_ = {args: ~[@constr_arg_use], bit_num: uint};
type pred_args = spanned<pred_args_>;
// The attached node ID is the *defining* node ID
// for this local.
type constr_arg_use = spanned<constr_arg_general_<inst>>;
/* Predicate constraints refer to the truth value of a predicate on variables
(definitely false, maybe true, or definitely true). The <path> field (and the
<def_id> field in the npred constructor) names a user-defined function that
may be the operator in a "check" expression in the source. */
type constraint = {
    path: @path,
    // FIXME (#2539): really only want it to be mut during
    // collect_locals. freeze it after that.
    descs: @dvec<pred_args>
};
// A single, fully-instantiated constraint: predicate path, its def_id,
// and one concrete argument list.
type tsconstr = {
    path: @path,
    def_id: def_id,
    args: ~[@constr_arg_use]
};
type sp_constr = spanned<tsconstr>;
// A tsconstr paired with its assigned bit number.
type norm_constraint = {bit_num: uint, c: sp_constr};
type constr_map = std::map::hashmap<def_id, constraint>;
/* Contains stuff that has to be computed up front */
/* For easy access, the fn_info stores two special constraints for each
function. So we need context. And so it seems clearer to just have separate
constraints. */
type fn_info =
    /* list, accumulated during pre/postcondition
    computation, of all local variables that may be
    used */
    // Doesn't seem to work without the @ -- bug
    {constrs: constr_map,
     num_constraints: uint,
     cf: ret_style,
     used_vars: @mut ~[node_id],
     ignore: bool};
/* mapping from node ID to typestate annotation */
type node_ann_table = @mut ~[mut ts_ann];
/* mapping from function name to fn_info map */
type fn_info_map = std::map::hashmap<node_id, fn_info>;
// Per-function analysis context: the function's fn_info plus identity.
type fn_ctxt =
    {enclosing: fn_info, id: node_id, name: ident, ccx: crate_ctxt};
// Per-crate analysis context shared by all typestate passes.
type crate_ctxt = {tcx: ty::ctxt, node_anns: node_ann_table, fm: fn_info_map};
// Looks up the fn_info for function `id`; asserts it was collected.
fn get_fn_info(ccx: crate_ctxt, id: node_id) -> fn_info {
    assert (ccx.fm.contains_key(id));
    return ccx.fm.get(id);
}
// Stores annotation `a` at index `i`, growing the table (padded with
// zero-width empty annotations) if `i` is past the current end.
fn add_node(ccx: crate_ctxt, i: node_id, a: ts_ann) {
    let sz = vec::len(*ccx.node_anns);
    if sz <= i as uint {
        vec::grow(*ccx.node_anns, (i as uint) - sz + 1u, empty_ann(0u));
    }
    ccx.node_anns[i] = a;
}
// Bounds-checked lookup of a node's annotation; none if out of range.
fn get_ts_ann(ccx: crate_ctxt, i: node_id) -> option<ts_ann> {
    if i as uint < vec::len(*ccx.node_anns) {
        return some::<ts_ann>(ccx.node_anns[i]);
    } else { return none::<ts_ann>; }
}
/********* utils ********/
// Like get_ts_ann but fails (with a logged error) on a missing entry.
fn node_id_to_ts_ann(ccx: crate_ctxt, id: node_id) -> ts_ann {
    match get_ts_ann(ccx, id) {
      none {
        error!("node_id_to_ts_ann: no ts_ann for node_id %d", id);
        fail;
      }
      some(tt) { return tt; }
    }
}
// Convenience accessor for a node's computed poststate.
fn node_id_to_poststate(ccx: crate_ctxt, id: node_id) -> poststate {
    debug!("node_id_to_poststate");
    return node_id_to_ts_ann(ccx, id).states.poststate;
}
// Returns the annotation attached to a statement (all stmt variants
// carry a node id).
fn stmt_to_ann(ccx: crate_ctxt, s: stmt) -> ts_ann {
    debug!("stmt_to_ann");
    match s.node {
      stmt_decl(_, id) | stmt_expr(_, id) | stmt_semi(_, id) {
        return node_id_to_ts_ann(ccx, id);
      }
    }
}
// --- Accessors for the four annotation components (precondition,
// --- postcondition, prestate, poststate) of exprs, stmts, and blocks.
/* fails if e has no annotation */
fn expr_states(ccx: crate_ctxt, e: @expr) -> pre_and_post_state {
    debug!("expr_states");
    return node_id_to_ts_ann(ccx, e.id).states;
}
/* fails if e has no annotation */
fn expr_pp(ccx: crate_ctxt, e: @expr) -> pre_and_post {
    debug!("expr_pp");
    return node_id_to_ts_ann(ccx, e.id).conditions;
}
fn stmt_pp(ccx: crate_ctxt, s: stmt) -> pre_and_post {
    return stmt_to_ann(ccx, s).conditions;
}
/* fails if b has no annotation */
fn block_pp(ccx: crate_ctxt, b: blk) -> pre_and_post {
    debug!("block_pp");
    return node_id_to_ts_ann(ccx, b.node.id).conditions;
}
// Resets both halves of a pre/post pair to empty.
fn clear_pp(pp: pre_and_post) {
    ann::clear(pp.precondition);
    ann::clear(pp.postcondition);
}
// Resets only the precondition of node `id`.
fn clear_precond(ccx: crate_ctxt, id: node_id) {
    let pp = node_id_to_ts_ann(ccx, id);
    ann::clear(pp.conditions.precondition);
}
fn block_states(ccx: crate_ctxt, b: blk) -> pre_and_post_state {
    debug!("block_states");
    return node_id_to_ts_ann(ccx, b.node.id).states;
}
fn stmt_states(ccx: crate_ctxt, s: stmt) -> pre_and_post_state {
    return stmt_to_ann(ccx, s).states;
}
fn expr_precond(ccx: crate_ctxt, e: @expr) -> precond {
    return expr_pp(ccx, e).precondition;
}
fn expr_postcond(ccx: crate_ctxt, e: @expr) -> postcond {
    return expr_pp(ccx, e).postcondition;
}
fn expr_prestate(ccx: crate_ctxt, e: @expr) -> prestate {
    return expr_states(ccx, e).prestate;
}
fn expr_poststate(ccx: crate_ctxt, e: @expr) -> poststate {
    return expr_states(ccx, e).poststate;
}
fn stmt_precond(ccx: crate_ctxt, s: stmt) -> precond {
    return stmt_pp(ccx, s).precondition;
}
fn stmt_postcond(ccx: crate_ctxt, s: stmt) -> postcond {
    return stmt_pp(ccx, s).postcondition;
}
fn states_to_poststate(ss: pre_and_post_state) -> poststate {
    return ss.poststate;
}
fn stmt_prestate(ccx: crate_ctxt, s: stmt) -> prestate {
    return stmt_states(ccx, s).prestate;
}
fn stmt_poststate(ccx: crate_ctxt, s: stmt) -> poststate {
    return stmt_states(ccx, s).poststate;
}
fn block_precond(ccx: crate_ctxt, b: blk) -> precond {
    return block_pp(ccx, b).precondition;
}
fn block_postcond(ccx: crate_ctxt, b: blk) -> postcond {
    return block_pp(ccx, b).postcondition;
}
fn block_prestate(ccx: crate_ctxt, b: blk) -> prestate {
    return block_states(ccx, b).prestate;
}
fn block_poststate(ccx: crate_ctxt, b: blk) -> poststate {
    return block_states(ccx, b).poststate;
}
// --- Mutators; the bool results report whether anything changed,
// --- which drives the dataflow fixpoint iteration.
// Overwrites node `id`'s prestate; true if it changed.
fn set_prestate_ann(ccx: crate_ctxt, id: node_id, pre: prestate) -> bool {
    debug!("set_prestate_ann");
    return set_prestate(node_id_to_ts_ann(ccx, id), pre);
}
// Unions `pre` into node `id`'s prestate; true if it changed.
fn extend_prestate_ann(ccx: crate_ctxt, id: node_id, pre: prestate) -> bool {
    debug!("extend_prestate_ann");
    return extend_prestate(node_id_to_ts_ann(ccx, id).states.prestate, pre);
}
// Overwrites node `id`'s poststate; true if it changed.
fn set_poststate_ann(ccx: crate_ctxt, id: node_id, post: poststate) -> bool {
    debug!("set_poststate_ann");
    return set_poststate(node_id_to_ts_ann(ccx, id), post);
}
// Unions `post` into node `id`'s poststate; true if it changed.
fn extend_poststate_ann(ccx: crate_ctxt, id: node_id, post: poststate) ->
   bool {
    debug!("extend_poststate_ann");
    return extend_poststate(
        node_id_to_ts_ann(ccx, id).states.poststate, post);
}
// Sets both condition halves of node `id` at once.
fn set_pre_and_post(ccx: crate_ctxt, id: node_id, pre: precond,
                    post: postcond) {
    debug!("set_pre_and_post");
    let tt = node_id_to_ts_ann(ccx, id);
    set_precondition(tt, pre);
    set_postcondition(tt, post);
}
// Copies subexpression `sub`'s pre/postcondition onto node `id`.
fn copy_pre_post(ccx: crate_ctxt, id: node_id, sub: @expr) {
    // Was debug!("set_pre_and_post") — a copy-paste of the wrong
    // function name that made debug traces misleading.
    debug!("copy_pre_post");
    let p = expr_pp(ccx, sub);
    copy_pre_post_(ccx, id, p.precondition, p.postcondition);
}
// Installs the given precondition/postcondition pair on node `id`.
fn copy_pre_post_(ccx: crate_ctxt, id: node_id, pre: prestate,
                  post: poststate) {
    debug!("copy_pre_post_");
    let tt = node_id_to_ts_ann(ccx, id);
    set_precondition(tt, pre);
    set_postcondition(tt, post);
}
/* sets all bits to *1* */
fn set_postcond_false(ccx: crate_ctxt, id: node_id) {
    let p = node_id_to_ts_ann(ccx, id);
    ann::set(p.conditions.postcondition);
}
// Marks an expression as pure: prestate flows through unchanged into
// both prestate and poststate. Returns true if either changed.
fn pure_exp(ccx: crate_ctxt, id: node_id, p: prestate) -> bool {
    return set_prestate_ann(ccx, id, p) | set_poststate_ann(ccx, id, p);
}
fn num_constraints(m: fn_info) -> uint { return m.num_constraints; }
// Builds a fresh crate-wide analysis context with empty tables.
fn new_crate_ctxt(cx: ty::ctxt) -> crate_ctxt {
    let na: ~[mut ts_ann] = ~[mut];
    return {tcx: cx, node_anns: @mut na, fm: int_hash::<fn_info>()};
}
/* Use e's type to determine whether it returns.
 If it has a function type with a ! annotation,
the answer is noreturn. */
fn controlflow_expr(ccx: crate_ctxt, e: @expr) -> ret_style {
    match ty::get(ty::node_id_to_type(ccx.tcx, e.id)).struct {
      ty::ty_fn(f) { return f.ret_style; }
      _ { return return_val; }
    }
}
// Returns the constraints declared on `e`'s function type, or the
// empty vector if `e` is not of function type.
fn constraints_expr(cx: ty::ctxt, e: @expr) -> ~[@ty::constr] {
    match ty::get(ty::node_id_to_type(cx, e.id)).struct {
      ty::ty_fn(f) { return f.constraints; }
      _ { return ~[]; }
    }
}
// def_map lookup that fails loudly on a missing entry.
fn node_id_to_def_strict(cx: ty::ctxt, id: node_id) -> def {
    match cx.def_map.find(id) {
      none {
        error!("node_id_to_def: node_id %d has no def", id);
        fail;
      }
      some(d) { return d; }
    }
}
// Non-failing def_map lookup.
fn node_id_to_def(ccx: crate_ctxt, id: node_id) -> option<def> {
    return ccx.tcx.def_map.find(id);
}
// Expands one table constraint (predicate + several argument lists)
// into one norm_constraint per argument list.
fn norm_a_constraint(id: def_id, c: constraint) -> ~[norm_constraint] {
    let mut rslt: ~[norm_constraint] = ~[];
    for (*c.descs).each |pd| {
        vec::push(rslt,
                  {bit_num: pd.node.bit_num,
                   c: respan(pd.span, {path: c.path,
                                       def_id: id,
                                       args: pd.node.args})});
    }
    return rslt;
}
// Tried to write this as an iterator, but I got a
// non-exhaustive match in trans.
// Flattens the enclosing function's constraint table into a vector of
// per-instance constraints.
fn constraints(fcx: fn_ctxt) -> ~[norm_constraint] {
    let mut rslt: ~[norm_constraint] = ~[];
    for fcx.enclosing.constrs.each |key, val| {
        vec::push_all(rslt, norm_a_constraint(key, val));
    };
    return rslt;
}
// FIXME (#2539): Would rather take an immutable vec as an argument,
// should freeze it at some earlier point.
// Finds the bit number of the recorded argument list matching `occ`
// (compared by defining node id); internal compiler error if absent.
fn match_args(fcx: fn_ctxt, occs: @dvec<pred_args>,
              occ: ~[@constr_arg_use]) -> uint {
    debug!("match_args: looking at %s",
           constr_args_to_str(fn@(i: inst) -> ~str { *i.ident }, occ));
    for (*occs).each |pd| {
        log(debug,
            ~"match_args: candidate " + pred_args_to_str(pd));
        fn eq(p: inst, q: inst) -> bool { return p.node == q.node; }
        if ty::args_eq(eq, pd.node.args, occ) { return pd.node.bit_num; }
    }
    fcx.ccx.tcx.sess.bug(~"match_args: no match for occurring args");
}
// Resolves a constraint-operator node to the def_id of the predicate
// function it names; bug if it isn't a function.
fn def_id_for_constr(tcx: ty::ctxt, t: node_id) -> def_id {
    match tcx.def_map.find(t) {
      none {
        tcx.sess.bug(~"node_id_for_constr: bad node_id " + int::str(t));
      }
      some(def_fn(i, _)) { return i; }
      _ { tcx.sess.bug(~"node_id_for_constr: pred is not a function"); }
    }
}
// Converts an expression used as a predicate argument into a
// constr_arg_use. Only local variables (in any binding form) and
// literals are legal; anything else is a (span-reported) error.
fn expr_to_constr_arg(tcx: ty::ctxt, e: @expr) -> @constr_arg_use {
    match e.node {
      expr_path(p) {
        match tcx.def_map.find(e.id) {
          some(def_local(nid, _)) | some(def_arg(nid, _)) |
          some(def_binding(nid, _)) | some(def_upvar(nid, _, _, _)) {
            return @respan(p.span,
                           carg_ident({ident: p.idents[0], node: nid}));
          }
          some(what) {
              tcx.sess.span_bug(e.span,
                 fmt!("exprs_to_constr_args: non-local variable %? \
                       as pred arg", what));
          }
          none {
              tcx.sess.span_bug(e.span,
                 ~"exprs_to_constr_args: unbound id as pred arg");
          }
        }
      }
      expr_lit(l) { return @respan(e.span, carg_lit(l)); }
      _ {
        tcx.sess.span_fatal(e.span,
                            ~"arguments to constrained functions must be " +
                                ~"literals or local variables");
      }
    }
}
// Maps expr_to_constr_arg over an argument list.
fn exprs_to_constr_args(tcx: ty::ctxt,
                        args: ~[@expr]) -> ~[@constr_arg_use] {
    let f = |a| expr_to_constr_arg(tcx, a);
    let mut rslt: ~[@constr_arg_use] = ~[];
    for args.each |e| { vec::push(rslt, f(e)); }
    rslt
}
// Converts a "check"-style call expression (predicate applied to
// arguments) into a spanned constraint; bug on any other shape.
fn expr_to_constr(tcx: ty::ctxt, e: @expr) -> sp_constr {
    match e.node {
      expr_call(operator, args, _) {
        match operator.node {
          expr_path(p) {
            return respan(e.span,
                       {path: p,
                        def_id: def_id_for_constr(tcx, operator.id),
                        args: exprs_to_constr_args(tcx, args)});
          }
          _ {
            tcx.sess.span_bug(operator.span,
                              ~"ill-formed operator in predicate");
          }
        }
      }
      _ {
        tcx.sess.span_bug(e.span, ~"ill-formed predicate");
      }
    }
}
// Renders a pred_args as "<bit_num, args>" for logging.
fn pred_args_to_str(p: pred_args) -> ~str {
    ~"<" + uint::str(p.node.bit_num) + ~", " +
        constr_args_to_str(fn@(i: inst) -> ~str {return *i.ident; },
                           p.node.args)
        + ~">"
}
// Instantiates a declared constraint at a call site: each formal
// argument position is replaced by the corresponding actual.
fn substitute_constr_args(cx: ty::ctxt, actuals: ~[@expr], c: @ty::constr) ->
   tsconstr {
    let mut rslt: ~[@constr_arg_use] = ~[];
    for c.node.args.each |a| {
        vec::push(rslt, substitute_arg(cx, actuals, a));
    }
    return {path: c.node.path,
         def_id: c.node.id,
         args: rslt};
}
// Substitutes one constraint argument: a formal index is replaced by
// the actual expression at that position (fatal error if the index is
// out of range); base and literal args pass through unchanged.
fn substitute_arg(cx: ty::ctxt, actuals: ~[@expr], a: @constr_arg) ->
   @constr_arg_use {
    let num_actuals = vec::len(actuals);
    match a.node {
      carg_ident(i) {
        if i < num_actuals {
            return expr_to_constr_arg(cx, actuals[i]);
        } else {
            cx.sess.span_fatal(a.span, ~"constraint argument out of bounds");
        }
      }
      carg_base { return @respan(a.span, carg_base); }
      carg_lit(l) { return @respan(a.span, carg_lit(l)); }
    }
}
// True if `desc`'s argument list matches `pattern` position by
// position: idents compare by defining node id, literals by value.
// Assumes pattern is at least as long as desc's args — TODO confirm.
fn pred_args_matches(pattern: ~[constr_arg_general_<inst>],
                     desc: pred_args) ->
   bool {
    let mut i = 0u;
    for desc.node.args.each |c| {
        let n = pattern[i];
        match c.node {
          carg_ident(p) {
            match n {
              carg_ident(q) { if p.node != q.node { return false; } }
              _ { return false; }
            }
          }
          carg_base { if n != carg_base { return false; } }
          carg_lit(l) {
            match n {
              carg_lit(m) { if !const_eval::lit_eq(l, m) { return false; } }
              _ { return false; }
            }
          }
        }
        i += 1u;
    }
    return true;
}
// Returns the bit number of the first recorded instance whose args
// match `pattern`, if any.
fn find_instance_(pattern: ~[constr_arg_general_<inst>],
                  descs: ~[pred_args]) ->
   option<uint> {
    for descs.each |d| {
        if pred_args_matches(pattern, d) { return some(d.node.bit_num); }
    }
    return none;
}
// A named variable instance: surface identifier plus defining node id.
type inst = {ident: ident, node: node_id};
enum dest {
    local_dest(inst), // RHS is assigned to a local variable
    call // RHS is passed to a function
}
// A variable-to-variable renaming, applied to constraint arguments.
type subst = ~[{from: inst, to: inst}];
// For each recorded instance of `c` that mentions a substituted
// variable, finds the already-recorded instance that results from
// applying `subst`, returning (old bit, new bit) pairs. Instances
// whose substituted form was never recorded are silently skipped.
fn find_instances(_fcx: fn_ctxt, subst: subst,
                  c: constraint) -> ~[{from: uint, to: uint}] {

    if vec::len(subst) == 0u { return ~[]; }
    let mut res = ~[];
    // swap the desc vector out of the dvec so it can be iterated
    // immutably while being consulted by find_instance_
    do (*c.descs).swap |v| {
        let v <- vec::from_mut(v);
        for v.each |d| {
            if args_mention(d.node.args, find_in_subst_bool, subst) {
                let old_bit_num = d.node.bit_num;
                let newv = replace(subst, d);
                match find_instance_(newv, v) {
                  some(d1) {vec::push(res, {from: old_bit_num, to: d1})}
                  _ {}
                }
            } else {}
        }
        vec::to_mut(v)
    }
    return res;
}
// Looks up the replacement instance for node `id` in a substitution.
fn find_in_subst(id: node_id, s: subst) -> option<inst> {
    for s.each |p| {
        if id == p.from.node { return some(p.to); }
    }
    return none;
}
// Predicate form of find_in_subst (argument order flipped so it can be
// passed to args_mention).
fn find_in_subst_bool(s: subst, id: node_id) -> bool {
    is_some(find_in_subst(id, s))
}
// Renders an instantiated argument list as "< a b * >" for logging.
fn insts_to_str(stuff: ~[constr_arg_general_<inst>]) -> ~str {
    let mut rslt = ~"<";
    for stuff.each |i| {
        rslt +=
            ~" " +
                match i {
                  carg_ident(p) { *p.ident }
                  carg_base { ~"*" }
                  carg_lit(_) { ~"~[lit]" }
                } + ~" ";
    }
    rslt += ~">";
    rslt
}
// Applies a substitution to one instance's argument list; unmatched
// idents and non-ident args are kept as-is.
fn replace(subst: subst, d: pred_args) -> ~[constr_arg_general_<inst>] {
    let mut rslt: ~[constr_arg_general_<inst>] = ~[];
    for d.node.args.each |c| {
        match c.node {
          carg_ident(p) {
            match find_in_subst(p.node, subst) {
              some(newv) { vec::push(rslt, carg_ident(newv)); }
              _ { vec::push(rslt, c.node); }
            }
          }
          _ {
            vec::push(rslt, c.node);
          }
        }
    }
    return rslt;
}
// Distinguishes `if check(...)` expressions from plain `if`.
enum if_ty { if_check, plain_if, }
// Invokes `f` on every constraint of the enclosing function that
// mentions variable `id`.
fn for_constraints_mentioning(fcx: fn_ctxt, id: node_id,
                              f: fn(norm_constraint)) {
    for constraints(fcx).each |c| {
        if constraint_mentions(fcx, c, id) { f(c); }
    };
}
// Resolves a reference to its *defining* local node id, failing with a
// span error if it isn't a local or is unbound.
// NOTE(review): def_upvar is matched with 3 fields here and in
// local_node_id_to_def_id, but with 4 elsewhere in this pass — confirm
// which arity is current.
fn local_node_id_to_def_id_strict(fcx: fn_ctxt, sp: span, i: node_id) ->
   def_id {
    match local_node_id_to_def(fcx, i) {
      some(def_local(nid, _)) | some(def_arg(nid, _)) |
      some(def_upvar(nid, _, _)) {
        return local_def(nid);
      }
      some(_) {
        fcx.ccx.tcx.sess.span_fatal(sp,
                                    ~"local_node_id_to_def_id: id \
                                     isn't a local");
      }
      none {
        // should really be bug. span_bug()?
        fcx.ccx.tcx.sess.span_fatal(sp,
                                    ~"local_node_id_to_def_id: id \
                                     is unbound");
      }
    }
}
// Non-failing def_map lookup for a node id.
fn local_node_id_to_def(fcx: fn_ctxt, i: node_id) -> option<def> {
    fcx.ccx.tcx.def_map.find(i)
}
// As above, but returns the defining def_id only for local-like defs.
fn local_node_id_to_def_id(fcx: fn_ctxt, i: node_id) -> option<def_id> {
    match local_node_id_to_def(fcx, i) {
      some(def_local(nid, _)) | some(def_arg(nid, _)) |
      some(def_binding(nid, _)) | some(def_upvar(nid, _, _)) {
        some(local_def(nid))
      }
      _ { none }
    }
}
// Crate-local variant: returns just the node part of the def_id.
fn local_node_id_to_local_def_id(fcx: fn_ctxt, i: node_id) ->
   option<node_id> {
    match local_node_id_to_def_id(fcx, i) {
      some(did) { some(did.node) }
      _ { none }
    }
}
// Propagates constraints from `src` to `dest` in the postcondition of
// the expression `parent_exp`.
fn copy_in_postcond(fcx: fn_ctxt, parent_exp: node_id, dest: inst, src: inst,
                    ty: oper_type) {
    let post =
        node_id_to_ts_ann(fcx.ccx, parent_exp).conditions.postcondition;
    copy_in_poststate_two(fcx, post, post, dest, src, ty);
}
// Same propagation, but on a poststate (src and target are the same).
fn copy_in_poststate(fcx: fn_ctxt, post: poststate, dest: inst, src: inst,
                     ty: oper_type) {
    copy_in_poststate_two(fcx, post, post, dest, src, ty);
}
// In target_post, set the bits corresponding to copies of any
// constraints mentioning src that are set in src_post, with
// dest substituted for src.
// (This doesn't create any new constraints. If a new, substituted
// constraint isn't already in the bit vector, it's ignored.)
fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate,
                         target_post: poststate, dest: inst, src: inst,
                         ty: oper_type) {
    let mut subst;
    match ty {
      // a swap renames in both directions at once
      oper_swap { subst = ~[{from: dest, to: src}, {from: src, to: dest}]; }
      oper_assign_op {
        return; // Don't do any propagation
      }
      _ { subst = ~[{from: src, to: dest}]; }
    }

    for fcx.enclosing.constrs.each_value |val| {
        // replace any occurrences of the src def_id with the
        // dest def_id
        let insts = find_instances(fcx, subst, val);

        for insts.each |p| {
            if bitvectors::promises_(p.from, src_post) {
                set_in_poststate_(p.to, target_post);
            }
        }
    };
}
fn forget_in_postcond(fcx: fn_ctxt, parent_exp: node_id, dead_v: node_id) {
    // In the postcondition given by parent_exp, clear the bits
    // for any constraints mentioning dead_v
    let d = local_node_id_to_local_def_id(fcx, dead_v);
    // non-local dead_v (no local def id) is silently a no-op
    do option::iter(d) |d_id| {
        do for_constraints_mentioning(fcx, d_id) |c| {
            debug!("clearing constraint %u %s",
                   c.bit_num,
                   constraint_to_str(fcx.ccx.tcx, c.c));
            clear_in_postcond(c.bit_num,
                              node_id_to_ts_ann(fcx.ccx,
                                                parent_exp).conditions);
        }
    };
}
// Poststate analogue of forget_in_postcond; returns whether any bit
// actually changed (for fixpoint iteration).
fn forget_in_poststate(fcx: fn_ctxt, p: poststate, dead_v: node_id) -> bool {
    // In the poststate given by parent_exp, clear the bits
    // for any constraints mentioning dead_v
    let d = local_node_id_to_local_def_id(fcx, dead_v);
    let mut changed = false;
    do option::iter(d) |d_id| {
        do for_constraints_mentioning(fcx, d_id) |c| {
            changed |= clear_in_poststate_(c.bit_num, p);
        }
    }
    return changed;
}
// Linear membership test for node ids.
fn any_eq(v: ~[node_id], d: node_id) -> bool {
    for v.each |i| { if i == d { return true; } }
    false
}
// True if constraint `c` mentions variable `v` among its arguments.
fn constraint_mentions(_fcx: fn_ctxt, c: norm_constraint, v: node_id) ->
   bool {
    return args_mention(c.c.node.args, any_eq, ~[v]);
}
// True if any ident argument's node id satisfies `q(s, id)`; the
// generic `s` lets callers pass either an id list or a substitution.
fn args_mention<T>(args: ~[@constr_arg_use],
                   q: fn(~[T], node_id) -> bool,
                   s: ~[T]) -> bool {
    for args.each |a| {
        match a.node {
          carg_ident(p1) { if q(s, p1.node) { return true; } } _ { }
        }
    }
    return false;
}
// Records that variable `v` is used in the enclosing function.
fn use_var(fcx: fn_ctxt, v: node_id) {
    vec::push(*fcx.enclosing.used_vars, v);
}
// Maps an initialization op onto the operand-invalidation category.
fn op_to_oper_ty(io: init_op) -> oper_type {
    match io { init_move { oper_move } _ { oper_assign } }
}
// default function visitor
fn do_nothing<T>(_fk: visit::fn_kind, _decl: fn_decl, _body: blk,
                 _sp: span, _id: node_id,
                 _t: T, _v: visit::vt<T>) {
}
// Instantiates a declared constraint's formal-argument indices against
// a function's parameter list, producing concrete ident arguments;
// span bug if an index exceeds the parameter count.
fn args_to_constr_args(tcx: ty::ctxt, args: ~[arg],
                       indices: ~[@sp_constr_arg<uint>])
    -> ~[@constr_arg_use] {
    let mut actuals: ~[@constr_arg_use] = ~[];
    let num_args = vec::len(args);
    for indices.each |a| {
        vec::push(
            actuals,
            @respan(a.span,
                    match a.node {
                      carg_base { carg_base }
                      carg_ident(i) {
                        if i < num_args {
                            carg_ident({ident: args[i].ident,
                                        node: args[i].id})
                        } else {
                            tcx.sess.span_bug(a.span,
                                              ~"index out of bounds in \
                                               constraint arg");
                        }
                      }
                      carg_lit(l) { carg_lit(l) }
                    }));
    }
    return actuals;
}
// Lowers an AST constraint on `args` to a typestate tsconstr.
fn ast_constr_to_ts_constr(tcx: ty::ctxt, args: ~[arg], c: @constr) ->
   tsconstr {
    let tconstr = ty::ast_constr_to_constr(tcx, c);
    return {path: tconstr.node.path,
         def_id: tconstr.node.id,
         args: args_to_constr_args(tcx, args, tconstr.node.args)};
}
// As above, but keeps the original constraint's span.
fn ast_constr_to_sp_constr(tcx: ty::ctxt, args: ~[arg], c: @constr) ->
   sp_constr {
    let tconstr = ast_constr_to_ts_constr(tcx, args, c);
    return respan(c.span, tconstr);
}
// A let-like binding: destinations on the left, optional initializer.
type binding = {lhs: ~[dest], rhs: option<initializer>};
// Flattens a `let` declaration's pattern into one binding whose lhs
// has a destination per bound name.
fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding {
    let mut lhs = ~[];
    do pat_bindings(tcx.def_map, loc.node.pat) |_bm, p_id, _s, name| {
      vec::push(lhs, local_dest({ident: path_to_ident(name), node: p_id}));
    };
    {lhs: lhs, rhs: loc.node.init}
}
// Maps local_to_bindings over a declaration list.
fn locals_to_bindings(tcx: ty::ctxt, locals: ~[@local]) -> ~[binding] {
    let mut rslt = ~[];
    for locals.each |loc| { vec::push(rslt, local_to_bindings(tcx, loc)); }
    return rslt;
}
// Returns the argument-passing modes of the callee's (auto-derefed)
// function type; bug if the callee isn't of function type.
fn callee_modes(fcx: fn_ctxt, callee: node_id) -> ~[mode] {
    let ty = ty::type_autoderef(fcx.ccx.tcx,
                                ty::node_id_to_type(fcx.ccx.tcx, callee));
    match ty::get(ty).struct {
      ty::ty_fn({inputs: args, _}) {
        let mut modes = ~[];
        for args.each |arg| { vec::push(modes, arg.mode); }
        return modes;
      }
      _ {
        // Shouldn't happen; callee should be ty_fn.
        fcx.ccx.tcx.sess.bug(~"non-fn callee type in callee_modes: " +
                                 util::ppaux::ty_to_str(fcx.ccx.tcx, ty));
      }
    }
}
// Classifies each callee parameter as a move or an assignment from the
// caller's perspective.
fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> ~[init_op] {
    do vec::map(callee_modes(fcx, callee)) |m| {
        match ty::resolved_mode(fcx.ccx.tcx, m) {
          by_move { init_move }
          by_copy | by_ref | by_val | by_mutbl_ref { init_assign }
        }
    }
}
// Pairs each actual argument with its init op as a `call` binding.
fn arg_bindings(ops: ~[init_op], es: ~[@expr]) -> ~[binding] {
    let mut bindings: ~[binding] = ~[];
    let mut i = 0u;
    for ops.each |op| {
        vec::push(bindings,
                  {lhs: ~[call], rhs: some({op: op, expr: es[i]})});
        i += 1u;
    }
    return bindings;
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -0,0 +1,153 @@
import syntax::ast;
import ast::{stmt, fn_ident, node_id, crate, return_val, noreturn, expr};
import syntax::{visit, print};
import syntax::codemap::span;
import middle::ty;
import tstate::ann::{precond, prestate,
implies, ann_precond, ann_prestate};
import aux::*;
import util::ppaux::ty_to_str;
import bitvectors::*;
import annotate::annotate_crate;
import collect_locals::mk_f_to_fn_info;
import pre_post_conditions::fn_pre_post;
import states::find_pre_post_state_fn;
import syntax::print::pprust::expr_to_str;
import driver::session::session;
import std::map::hashmap;
// Checks (post-order) that each expression's computed prestate implies
// its precondition; reports a fatal error with an example unsatisfied
// constraint otherwise.
fn check_states_expr(e: @expr, fcx: fn_ctxt, v: visit::vt<fn_ctxt>) {
    visit::visit_expr(e, fcx, v);

    let prec: precond = expr_precond(fcx.ccx, e);
    let pres: prestate = expr_prestate(fcx.ccx, e);

    if !implies(pres, prec) {
        let mut s = ~"";
        let diff = first_difference_string(fcx, prec, pres);
        s +=
            ~"unsatisfied precondition constraint (for example, " + diff +
                ~") for expression:\n";
        s += syntax::print::pprust::expr_to_str(e);
        s += ~"\nprecondition:\n";
        s += tritv_to_str(fcx, prec);
        s += ~"\nprestate:\n";
        s += tritv_to_str(fcx, pres);
        fcx.ccx.tcx.sess.span_fatal(e.span, s);
    }
}
// Statement analogue of check_states_expr: prestate must imply the
// statement's precondition, else fatal error with a counterexample.
fn check_states_stmt(s: @stmt, fcx: fn_ctxt, v: visit::vt<fn_ctxt>) {
    visit::visit_stmt(s, fcx, v);

    let a = stmt_to_ann(fcx.ccx, *s);
    let prec: precond = ann_precond(a);
    let pres: prestate = ann_prestate(a);

    debug!("check_states_stmt:");
    log(debug, print::pprust::stmt_to_str(*s));
    debug!("prec = ");
    log_tritv(fcx, prec);
    debug!("pres = ");
    log_tritv(fcx, pres);

    if !implies(pres, prec) {
        let mut ss = ~"";
        let diff = first_difference_string(fcx, prec, pres);
        ss +=
            ~"unsatisfied precondition constraint (for example, " + diff +
                ~") for statement:\n";
        ss += syntax::print::pprust::stmt_to_str(*s);
        ss += ~"\nprecondition:\n";
        ss += tritv_to_str(fcx, prec);
        ss += ~"\nprestate: \n";
        ss += tritv_to_str(fcx, pres);
        fcx.ccx.tcx.sess.span_fatal(s.span, ss);
    }
}
// Walks one function body checking every stmt/expr state against its
// condition; nested fns are skipped (they are visited separately).
fn check_states_against_conditions(fcx: fn_ctxt,
                                   fk: visit::fn_kind,
                                   f_decl: ast::fn_decl,
                                   f_body: ast::blk,
                                   sp: span,
                                   id: node_id) {
    /* Postorder traversal instead of pre is important
       because we want the smallest possible erroneous statement
       or expression. */
    let visitor = visit::mk_vt(
        @{visit_stmt: check_states_stmt,
          visit_expr: check_states_expr,
          visit_fn: |a,b,c,d,e,f,g| {
              do_nothing::<fn_ctxt>(a, b, c, d, e, f, g)
          }
          with *visit::default_visitor::<fn_ctxt>()});
    visit::visit_fn(fk, f_decl, f_body, sp, id, fcx, visitor);
}

// Runs the state computation to a fixpoint, then verifies every
// pre/poststate against the corresponding pre/postcondition.
fn check_fn_states(fcx: fn_ctxt,
                   fk: visit::fn_kind,
                   f_decl: ast::fn_decl,
                   f_body: ast::blk,
                   sp: span,
                   id: node_id) {
    /* Compute the pre- and post-states for this function */

    // Fixpoint iteration
    while find_pre_post_state_fn(fcx, f_decl, f_body) { }

    /* Now compare each expr's pre-state to its precondition
       and post-state to its postcondition */

    check_states_against_conditions(fcx, fk, f_decl, f_body, sp, id);
}
// Per-function driver: recurses into nested fns first, then (unless
// the function was flagged `ignore` for having no constrained calls)
// builds a fn_ctxt and checks its states.
fn fn_states(fk: visit::fn_kind, f_decl: ast::fn_decl, f_body: ast::blk,
             sp: span, id: node_id,
             ccx: crate_ctxt, v: visit::vt<crate_ctxt>) {
    visit::visit_fn(fk, f_decl, f_body, sp, id, ccx, v);

    // We may not care about typestate for this function if it contains
    // no constrained calls
    if !ccx.fm.get(id).ignore {
        /* Look up the var-to-bit-num map for this function */
        let f_info = ccx.fm.get(id);

        let name = visit::name_of_fn(fk);
        let fcx = {enclosing: f_info, id: id, name: name, ccx: ccx};
        check_fn_states(fcx, fk, f_decl, f_body, sp, id)
    }
}

// Top-level entry of the typestate check: collect fn_infos, annotate,
// compute pre/postconditions, then check states against conditions.
fn check_crate(cx: ty::ctxt, crate: @crate) {
    let ccx: crate_ctxt = new_crate_ctxt(cx);
    /* Build the global map from function id to var-to-bit-num-map */

    mk_f_to_fn_info(ccx, crate);
    /* Add a blank ts_ann for every statement (and expression) */

    annotate_crate(ccx, *crate);
    /* Compute the pre and postcondition for every subexpression */

    let vtor = visit::default_visitor::<crate_ctxt>();
    let vtor = @{visit_fn: fn_pre_post with *vtor};
    visit::visit_crate(*crate, ccx, visit::mk_vt(vtor));

    /* Check the pre- and postcondition against the pre- and poststate
       for every expression */
    let vtor = visit::default_visitor::<crate_ctxt>();
    let vtor = @{visit_fn: fn_states with *vtor};
    visit::visit_crate(*crate, ccx, visit::mk_vt(vtor));
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -0,0 +1,165 @@
import option::*;
import pat_util::*;
import syntax::ast::*;
import syntax::ast_util::*;
import syntax::visit;
import syntax::codemap::span;
import syntax::ast_util::respan;
import driver::session::session;
import aux::*;
import std::map::hashmap;
import dvec::{dvec, extensions};
// Accumulator for the constraints found in one function body.
type ctxt = {cs: @mut ~[sp_constr], tcx: ty::ctxt};

// Visitor that records every constraint occurring in an expression:
// `check` expressions, `if check` conditions, and the (instantiated)
// declared constraints of any called function.
fn collect_pred(e: @expr, cx: ctxt, v: visit::vt<ctxt>) {
    match e.node {
      expr_check(_, ch) { vec::push(*cx.cs, expr_to_constr(cx.tcx, ch)); }
      expr_if_check(ex, _, _) {
        vec::push(*cx.cs, expr_to_constr(cx.tcx, ex));
      }

      // If it's a call, generate appropriate instances of the
      // call's constraints.
      expr_call(operator, operands, _) {
        for constraints_expr(cx.tcx, operator).each |c| {
            let ct: sp_constr =
                respan(c.span,
                       aux::substitute_constr_args(cx.tcx, operands, c));
            vec::push(*cx.cs, ct);
        }
      }
      _ { }
    }
    // visit subexpressions
    visit::visit_expr(e, cx, v);
}
// Collects all constraints mentioned inside one function body (nested
// fns excluded via do_nothing) into a fresh ctxt.
fn find_locals(tcx: ty::ctxt,
               fk: visit::fn_kind,
               f_decl: fn_decl,
               f_body: blk,
               sp: span,
               id: node_id) -> ctxt {
    let cx: ctxt = {cs: @mut ~[], tcx: tcx};
    let visitor = visit::default_visitor::<ctxt>();
    let visitor =
        @{visit_expr: collect_pred,
          visit_fn: do_nothing
          with *visitor};
    visit::visit_fn(fk, f_decl, f_body, sp,
                    id, cx, visit::mk_vt(visitor));
    return cx;
}

// Records constraint `c` in `tbl` under its predicate's def_id with
// bit number `next`, appending to the existing entry's descriptor list
// if the predicate is already present. Returns the next free bit.
fn add_constraint(tcx: ty::ctxt, c: sp_constr, next: uint, tbl: constr_map) ->
   uint {
    log(debug,
        constraint_to_str(tcx, c) + ~" |-> " + uint::str(next));

    let {path: p, def_id: d_id, args: args} = c.node;
    match tbl.find(d_id) {
      some(ct) {
        (*ct.descs).push(respan(c.span, {args: args, bit_num: next}));
      }
      none {
        let rslt = @dvec();
        (*rslt).push(respan(c.span, {args: args, bit_num: next}));
        tbl.insert(d_id, {path:p, descs:rslt});
      }
    }
    return next + 1u;
}
// True if `body` contains any call whose callee type carries
// constraints — used to skip the whole analysis for functions that
// can't be affected by it.
fn contains_constrained_calls(tcx: ty::ctxt, body: blk) -> bool {
    type cx = @{
        tcx: ty::ctxt,
        mut has: bool
    };
    let cx = @{
        tcx: tcx,
        mut has: false
    };
    let vtor = visit::default_visitor::<cx>();
    let vtor = @{visit_expr: visit_expr with *vtor};
    visit::visit_block(body, cx, visit::mk_vt(vtor));
    return cx.has;

    // Sets cx.has as soon as any expression with constraints is seen.
    fn visit_expr(e: @expr, &&cx: cx, v: visit::vt<cx>) {
        import syntax::print::pprust;
        debug!("visiting %?", pprust::expr_to_str(e));
        visit::visit_expr(e, cx, v);
        if constraints_expr(cx.tcx, e).is_not_empty() {
            debug!("has constraints");
            cx.has = true;
        } else {
            debug!("has not constraints");
        }
    }
}
/* builds a table mapping each local var defined in f
   to a bit number in the precondition/postcondition vectors */
fn mk_fn_info(ccx: crate_ctxt,
              fk: visit::fn_kind,
              f_decl: fn_decl,
              f_body: blk,
              f_sp: span,
              id: node_id) {
    let name = visit::name_of_fn(fk);
    let res_map = new_def_hash::<constraint>();
    let mut next: uint = 0u;

    let cx: ctxt = find_locals(ccx.tcx, fk, f_decl, f_body, f_sp, id);
    /* now we have to add bit nums for both the constraints
       and the variables... */

    // Functions with no constrained calls are flagged `ignore` and get
    // no bits assigned at all.
    let ignore = !contains_constrained_calls(ccx.tcx, f_body);
    if !ignore {
        let mut i = 0u, l = vec::len(*cx.cs);
        while i < l {
            next = add_constraint(cx.tcx, copy cx.cs[i], next, res_map);
            i += 1u;
        }
        /* if this function has any constraints, instantiate them to the
           argument names and add them */
        for f_decl.constraints.each |c| {
            let sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c);
            next = add_constraint(cx.tcx, sc, next, res_map);
        }
    }

    let v: @mut ~[node_id] = @mut ~[];
    let rslt =
        {constrs: res_map,
         num_constraints: next,
         cf: f_decl.cf,
         used_vars: v,
         ignore: ignore};
    ccx.fm.insert(id, rslt);
    debug!("%s has %u constraints", *name, num_constraints(rslt));
}
/* initializes the global fn_info_map (mapping each function ID, including
   nested locally defined functions, onto a mapping from local variable name
   to bit number) */
fn mk_f_to_fn_info(ccx: crate_ctxt, c: @crate) {
    let visitor =
        visit::mk_simple_visitor(@{
            visit_fn: |a,b,c,d,e| mk_fn_info(ccx, a, b, c, d, e)
            with *visit::default_simple_visitor()});
    visit::visit_crate(*c, (), visitor);
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -0,0 +1,617 @@
import tstate::ann::*;
import aux::*;
import bitvectors::{bit_num, seq_preconds, seq_postconds,
intersect_states,
relax_precond_block, gen};
import tritv::*;
import pat_util::*;
import syntax::ast::*;
import syntax::ast_util::*;
import syntax::print::pprust::{expr_to_str, stmt_to_str};
import syntax::visit;
import util::common::{field_exprs, has_nonlocal_exits};
import syntax::codemap::span;
import driver::session::session;
import std::map::hashmap;
// Unimplemented stub: pre/post analysis for module items.  Always fails;
// reachable only via find_pre_post_item on item_mod.
fn find_pre_post_mod(_m: _mod) -> _mod {
    debug!("implement find_pre_post_mod!");
    fail;
}
// Unimplemented stub: pre/post analysis for foreign (native) modules.
// Always fails; reachable only via find_pre_post_item.
fn find_pre_post_foreign_mod(_m: foreign_mod) -> foreign_mod {
    debug!("implement find_pre_post_foreign_mod");
    fail;
}
// Runs the pre/post pass over one impl method.  Requires that
// mk_f_to_fn_info already registered the method's id in ccx.fm.
fn find_pre_post_method(ccx: crate_ctxt, m: @method) {
    assert (ccx.fm.contains_key(m.id));
    let fcx: fn_ctxt =
        {enclosing: ccx.fm.get(m.id),
         id: m.id,
         name: m.ident,
         ccx: ccx};
    find_pre_post_fn(fcx, m.body);
}
// Dispatches the pre/post pass over one item.  Only fns, impls, and
// (recursively) modules carry analyzable bodies; type-level items are
// no-ops, and classes/macros are expected never to reach here.
fn find_pre_post_item(ccx: crate_ctxt, i: item) {
    match i.node {
      item_const(_, e) {
        // do nothing -- item_consts don't refer to local vars
      }
      item_fn(_, _, body) {
        assert (ccx.fm.contains_key(i.id));
        let fcx =
            {enclosing: ccx.fm.get(i.id), id: i.id, name: i.ident, ccx: ccx};
        find_pre_post_fn(fcx, body);
      }
      item_mod(m) { find_pre_post_mod(m); }
      item_foreign_mod(nm) { find_pre_post_foreign_mod(nm); }
      item_ty(*) | item_enum(*) | item_trait(*) { return; }
      item_class(*) {
        fail ~"find_pre_post_item: shouldn't be called on item_class";
      }
      item_impl(_, _, _, ms) {
        // Analyze each method of the impl independently.
        for ms.each |m| { find_pre_post_method(ccx, m); }
      }
      item_mac(*) { fail ~"item macros unimplemented" }
    }
}
/* Finds the pre and postcondition for each expr in <args>;
sets the precondition in a to be the result of combining
the preconditions for <args>, and the postcondition in a to
be the union of all postconditions for <args> */
fn find_pre_post_exprs(fcx: fn_ctxt, args: ~[@expr], id: node_id) {
    /* Annotates every expression in <args>, then records on node <id>
       the sequenced precondition of the whole list and the union of the
       individual postconditions. */
    if vec::len::<@expr>(args) > 0u {
        debug!("find_pre_post_exprs: oper = %s", expr_to_str(args[0]));
    }
    // First pass: compute pre/post for each argument in order.
    for args.each |arg| { find_pre_post_expr(fcx, arg); }
    // Second pass: gather the freshly computed pairs and combine.
    let arg_pps = vec::map(args, |a| expr_pp(fcx.ccx, a) );
    set_pre_and_post(fcx.ccx, id, seq_preconds(fcx, arg_pps),
                     seq_postconds(fcx, vec::map(arg_pps, get_post)));
}
fn find_pre_post_loop(fcx: fn_ctxt, index: @expr, body: blk, id: node_id) {
    /* Pre/post for an indexed-loop form: annotate the index expression
       and the body, then combine.  The precondition sequences index then
       body; the postcondition is the intersection of what each
       guarantees, since the body may execute zero times. */
    find_pre_post_expr(fcx, index);
    find_pre_post_block(fcx, body);
    let index_pp = expr_pp(fcx.ccx, index);
    let body_pp = block_pp(fcx.ccx, body);
    let pre = seq_preconds(fcx, ~[index_pp, body_pp]);
    let post = intersect_states(expr_postcond(fcx.ccx, index),
                                block_postcond(fcx.ccx, body));
    copy_pre_post_(fcx.ccx, id, pre, post);
}
// Generates a pre/post assuming that a is the
// annotation for an if-expression with consequent conseq
// and alternative maybe_alt
// Generates a pre/post assuming that a is the
// annotation for an if-expression with consequent conseq
// and alternative maybe_alt.  For `if check` forms, the checked
// predicate's bit is generated on the antecedent *before* the
// alternative's precondition is read, so the alt does not see it —
// the call order here is load-bearing.
fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk,
                  maybe_alt: option<@expr>, id: node_id, chck: if_ty) {
    find_pre_post_expr(fcx, antec);
    find_pre_post_block(fcx, conseq);
    match maybe_alt {
      none {
        match chck {
          if_check {
            let c: sp_constr = expr_to_constr(fcx.ccx.tcx, antec);
            gen(fcx, antec.id, c.node);
          }
          _ { }
        }
        // No else-branch: only the condition's postcondition is
        // guaranteed after the if (conseq may not run).
        let precond_res =
            seq_preconds(fcx,
                         ~[expr_pp(fcx.ccx, antec),
                           block_pp(fcx.ccx, conseq)]);
        set_pre_and_post(fcx.ccx, id, precond_res,
                         expr_poststate(fcx.ccx, antec));
      }
      some(altern) {
        /*
          if check = if_check, then
          be sure that the predicate implied by antec
          is *not* true in the alternative
         */
        find_pre_post_expr(fcx, altern);
        let precond_false_case =
            seq_preconds(fcx,
                         ~[expr_pp(fcx.ccx, antec),
                           expr_pp(fcx.ccx, altern)]);
        let postcond_false_case =
            seq_postconds(fcx,
                          ~[expr_postcond(fcx.ccx, antec),
                            expr_postcond(fcx.ccx, altern)]);
        /* Be sure to set the bit for the check condition here,
         so that it's *not* set in the alternative. */
        match chck {
          if_check {
            let c: sp_constr = expr_to_constr(fcx.ccx.tcx, antec);
            gen(fcx, antec.id, c.node);
          }
          _ { }
        }
        let precond_true_case =
            seq_preconds(fcx,
                         ~[expr_pp(fcx.ccx, antec),
                           block_pp(fcx.ccx, conseq)]);
        let postcond_true_case =
            seq_postconds(fcx,
                          ~[expr_postcond(fcx.ccx, antec),
                            block_postcond(fcx.ccx, conseq)]);
        // NOTE(review): the two *pre*conditions are combined with
        // seq_postconds (a union), not seq_preconds — looks deliberate
        // (require whatever either branch requires) but confirm.
        let precond_res =
            seq_postconds(fcx, ~[precond_true_case, precond_false_case]);
        let postcond_res =
            intersect_states(postcond_true_case, postcond_false_case);
        set_pre_and_post(fcx.ccx, id, precond_res, postcond_res);
      }
    }
}
// If `new_var` resolves to a local variable, the enclosing node
// <larger_id> simply inherits the rhs's pre/post pair (the assignment
// itself adds no requirement).  Otherwise fall back to treating lhs and
// rhs as an ordinary expression sequence.
fn gen_if_local(fcx: fn_ctxt, lhs: @expr, rhs: @expr, larger_id: node_id,
                new_var: node_id) {
    match node_id_to_def(fcx.ccx, new_var) {
      some(d) {
        match d {
          def_local(nid, _) {
            find_pre_post_expr(fcx, rhs);
            let p = expr_pp(fcx.ccx, rhs);
            set_pre_and_post(fcx.ccx, larger_id, p.precondition,
                             p.postcondition);
          }
          _ { find_pre_post_exprs(fcx, ~[lhs, rhs], larger_id); }
        }
      }
      _ { find_pre_post_exprs(fcx, ~[lhs, rhs], larger_id); }
    }
}
// Common pre/post handling for assignment-like forms (move, swap,
// assign) on the expression <parent> with operands <lhs> and <rhs>.
// When the lhs is a path, constraints mentioning moved/overwritten
// variables are forgotten from the parent's postcondition, and — for a
// path-to-path update — constraints are rewritten from rhs to lhs.
fn handle_update(fcx: fn_ctxt, parent: @expr, lhs: @expr, rhs: @expr,
                 ty: oper_type) {
    find_pre_post_expr(fcx, rhs);
    match lhs.node {
      expr_path(p) {
        let post = expr_postcond(fcx.ccx, parent);
        // Snapshot the postcondition before any forgetting, so the
        // substitution below can still see the pre-update constraints.
        let tmp = post.clone();
        match ty {
          oper_move {
            if is_path(rhs) { forget_in_postcond(fcx, parent.id, rhs.id); }
          }
          oper_swap {
            forget_in_postcond(fcx, parent.id, lhs.id);
            forget_in_postcond(fcx, parent.id, rhs.id);
          }
          oper_assign {
            forget_in_postcond(fcx, parent.id, lhs.id);
          }
          _ { }
        }
        gen_if_local(fcx, lhs, rhs, parent.id, lhs.id);
        // Path = path: carry constraints over from rhs to lhs.
        match rhs.node {
          expr_path(p1) {
            let d = local_node_id_to_local_def_id(fcx, lhs.id);
            let d1 = local_node_id_to_local_def_id(fcx, rhs.id);
            match d {
              some(id) {
                match d1 {
                  some(id1) {
                    let instlhs =
                        {ident: path_to_ident(p), node: id};
                    let instrhs =
                        {ident: path_to_ident(p1), node: id1};
                    copy_in_poststate_two(fcx, tmp, post, instlhs, instrhs,
                                          ty);
                  }
                  _ { }
                }
              }
              _ { }
            }
          }
          _ {/* do nothing */ }
        }
      }
      // Non-path lhs: just analyze it as an ordinary expression.
      _ { find_pre_post_expr(fcx, lhs); }
    }
}
// For each by-move argument of a call, drop any constraints mentioning
// that operand from the call's postcondition — the value is gone after
// the call.  `modes` and `operands` are parallel vectors.
fn forget_args_moved_in(fcx: fn_ctxt, parent: @expr, modes: ~[mode],
                        operands: ~[@expr]) {
    do vec::iteri(modes) |i,mode| {
        match ty::resolved_mode(fcx.ccx.tcx, mode) {
          by_move { forget_in_postcond(fcx, parent.id, operands[i].id); }
          by_ref | by_val | by_mutbl_ref | by_copy { }
        }
    }
}
fn find_pre_post_expr_fn_upvars(fcx: fn_ctxt, e: @expr) {
    // A fn / fn-block expression neither requires nor establishes any
    // constraints itself, so give it an empty pre/post pair.
    clear_pp(expr_pp(fcx.ccx, e));
}
/* Fills in annotations as a side effect. Does not rebuild the expr */
/* Fills in annotations as a side effect. Does not rebuild the expr */
// Computes, per expression node, the precondition (constraints that
// must hold before evaluation) and postcondition (constraints
// guaranteed after).  Recurses structurally; the arm-by-arm combination
// order matters because annotations are mutated in place.
fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
    let enclosing = fcx.enclosing;
    let num_local_vars = num_constraints(enclosing);
    fn do_rand_(fcx: fn_ctxt, e: @expr) { find_pre_post_expr(fcx, e); }
    match e.node {
      expr_call(operator, operands, _) {
        /* copy */
        let mut args = operands;
        vec::push(args, operator);
        find_pre_post_exprs(fcx, args, e.id);
        /* see if the call has any constraints on its type */
        for constraints_expr(fcx.ccx.tcx, operator).each |c| {
            let i =
                bit_num(fcx, substitute_constr_args(fcx.ccx.tcx, args, c));
            require(i, expr_pp(fcx.ccx, e));
        }
        forget_args_moved_in(fcx, e, callee_modes(fcx, operator.id),
                             operands);
        /* if this is a failing call, its postcondition sets everything */
        match controlflow_expr(fcx.ccx, operator) {
          noreturn { set_postcond_false(fcx.ccx, e.id); }
          _ { }
        }
      }
      expr_vstore(ee, _) {
        // Inherit the inner expression's pre/post unchanged.
        find_pre_post_expr(fcx, ee);
        let p = expr_pp(fcx.ccx, ee);
        set_pre_and_post(fcx.ccx, e.id, p.precondition, p.postcondition);
      }
      expr_vec(args, _) {
        find_pre_post_exprs(fcx, args, e.id);
      }
      expr_path(p) {
        // A bare path requires and guarantees nothing here; use of the
        // variable itself is tracked elsewhere.
        let rslt = expr_pp(fcx.ccx, e);
        clear_pp(rslt);
      }
      expr_new(p, _, v) {
        find_pre_post_exprs(fcx, ~[p, v], e.id);
      }
      expr_log(_, lvl, arg) {
        find_pre_post_exprs(fcx, ~[lvl, arg], e.id);
      }
      expr_fn(_, _, _, cap_clause) | expr_fn_block(_, _, cap_clause) {
        // Closures: mark captured-by-reference vars as used; moved
        // captures are forgotten from the postcondition.
        find_pre_post_expr_fn_upvars(fcx, e);
        for (*cap_clause).each |cap_item| {
            let d = local_node_id_to_local_def_id(fcx, cap_item.id);
            option::iter(d, |id| use_var(fcx, id) );
        }
        for (*cap_clause).each |cap_item| {
            if cap_item.is_move {
                log(debug, (~"forget_in_postcond: ", cap_item));
                forget_in_postcond(fcx, e.id, cap_item.id);
            }
        }
      }
      expr_block(b) {
        find_pre_post_block(fcx, b);
        let p = block_pp(fcx.ccx, b);
        set_pre_and_post(fcx.ccx, e.id, p.precondition, p.postcondition);
      }
      expr_rec(fields, maybe_base) {
        let mut es = field_exprs(fields);
        match maybe_base { none {/* no-op */ } some(b) { vec::push(es, b); } }
        find_pre_post_exprs(fcx, es, e.id);
      }
      expr_tup(elts) { find_pre_post_exprs(fcx, elts, e.id); }
      expr_move(lhs, rhs) { handle_update(fcx, e, lhs, rhs, oper_move); }
      expr_swap(lhs, rhs) { handle_update(fcx, e, lhs, rhs, oper_swap); }
      expr_assign(lhs, rhs) { handle_update(fcx, e, lhs, rhs, oper_assign); }
      expr_assign_op(_, lhs, rhs) {
        /* Different from expr_assign in that the lhs *must*
           already be initialized */
        find_pre_post_exprs(fcx, ~[lhs, rhs], e.id);
        forget_in_postcond(fcx, e.id, lhs.id);
      }
      expr_lit(_) { clear_pp(expr_pp(fcx.ccx, e)); }
      expr_ret(maybe_val) {
        // Control never continues past a return, so its postcondition
        // is vacuously false (everything holds).
        match maybe_val {
          none {
            clear_precond(fcx.ccx, e.id);
            set_postcond_false(fcx.ccx, e.id);
          }
          some(ret_val) {
            find_pre_post_expr(fcx, ret_val);
            set_precondition(node_id_to_ts_ann(fcx.ccx, e.id),
                             expr_precond(fcx.ccx, ret_val));
            set_postcond_false(fcx.ccx, e.id);
          }
        }
      }
      expr_if(antec, conseq, maybe_alt) {
        join_then_else(fcx, antec, conseq, maybe_alt, e.id, plain_if);
      }
      expr_binary(bop, l, r) {
        if lazy_binop(bop) {
            // && / ||: r may not evaluate, so only l's postcondition is
            // guaranteed afterwards.
            find_pre_post_expr(fcx, l);
            find_pre_post_expr(fcx, r);
            let overall_pre =
                seq_preconds(fcx,
                             ~[expr_pp(fcx.ccx, l), expr_pp(fcx.ccx, r)]);
            set_precondition(node_id_to_ts_ann(fcx.ccx, e.id), overall_pre);
            set_postcondition(node_id_to_ts_ann(fcx.ccx, e.id),
                              expr_postcond(fcx.ccx, l));
        } else { find_pre_post_exprs(fcx, ~[l, r], e.id); }
      }
      expr_addr_of(_, x) | expr_cast(x, _) | expr_unary(_, x) |
      expr_loop_body(x) | expr_do_body(x) | expr_assert(x) | expr_copy(x) {
        // Single-subexpression forms: inherit the child's pre/post.
        find_pre_post_expr(fcx, x);
        copy_pre_post(fcx.ccx, e.id, x);
      }
      expr_while(test, body) {
        // Body may run zero times: post is the intersection of test's
        // and body's postconditions.
        find_pre_post_expr(fcx, test);
        find_pre_post_block(fcx, body);
        set_pre_and_post(fcx.ccx, e.id,
                         seq_preconds(fcx,
                                      ~[expr_pp(fcx.ccx, test),
                                        block_pp(fcx.ccx, body)]),
                         intersect_states(expr_postcond(fcx.ccx, test),
                                          block_postcond(fcx.ccx, body)));
      }
      expr_loop(body) {
        find_pre_post_block(fcx, body);
        /* Infinite loop: if control passes it, everything is true. */
        let mut loop_postcond = false_postcond(num_local_vars);
        /* Conservative approximation: if the body has any nonlocal exits,
           the poststate is blank since we don't know what parts of it
           execute. */
        if has_nonlocal_exits(body) {
            loop_postcond = empty_poststate(num_local_vars);
        }
        set_pre_and_post(fcx.ccx, e.id, block_precond(fcx.ccx, body),
                         loop_postcond);
      }
      expr_index(val, sub) { find_pre_post_exprs(fcx, ~[val, sub], e.id); }
      expr_match(ex, alts, _) {
        find_pre_post_expr(fcx, ex);
        fn do_an_alt(fcx: fn_ctxt, an_alt: arm) -> pre_and_post {
            match an_alt.guard {
              some(e) { find_pre_post_expr(fcx, e); }
              _ {}
            }
            find_pre_post_block(fcx, an_alt.body);
            return block_pp(fcx.ccx, an_alt.body);
        }
        let mut alt_pps = ~[];
        for alts.each |a| { vec::push(alt_pps, do_an_alt(fcx, a)); }
        // Fold the per-arm pre/posts: union of preconditions (seq'd
        // after the scrutinee), intersection of postconditions.
        fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post,
                      &&next: pre_and_post) -> pre_and_post {
            union(pp.precondition, seq_preconds(fcx, ~[antec, next]));
            intersect(pp.postcondition, next.postcondition);
            return pp;
        }
        let antec_pp = pp_clone(expr_pp(fcx.ccx, ex));
        let e_pp =
            {precondition: empty_prestate(num_local_vars),
             postcondition: false_postcond(num_local_vars)};
        let g = |a,b| combine_pp(antec_pp, fcx, a, b);
        let alts_overall_pp =
            vec::foldl(e_pp, alt_pps, g);
        set_pre_and_post(fcx.ccx, e.id, alts_overall_pp.precondition,
                         alts_overall_pp.postcondition);
      }
      expr_field(operator, _, _) {
        find_pre_post_expr(fcx, operator);
        copy_pre_post(fcx.ccx, e.id, operator);
      }
      expr_fail(maybe_val) {
        let mut prestate;
        match maybe_val {
          none { prestate = empty_prestate(num_local_vars); }
          some(fail_val) {
            find_pre_post_expr(fcx, fail_val);
            prestate = expr_precond(fcx.ccx, fail_val);
          }
        }
        set_pre_and_post(fcx.ccx, e.id,
                         /* if execution continues after fail,
                            then everything is true! */
                         prestate, false_postcond(num_local_vars));
      }
      expr_check(_, p) {
        find_pre_post_expr(fcx, p);
        copy_pre_post(fcx.ccx, e.id, p);
        /* predicate p holds after this expression executes */
        let c: sp_constr = expr_to_constr(fcx.ccx.tcx, p);
        gen(fcx, e.id, c.node);
      }
      expr_if_check(p, conseq, maybe_alt) {
        join_then_else(fcx, p, conseq, maybe_alt, e.id, if_check);
      }
      expr_break { clear_pp(expr_pp(fcx.ccx, e)); }
      expr_again { clear_pp(expr_pp(fcx.ccx, e)); }
      expr_mac(_) { fcx.ccx.tcx.sess.bug(~"unexpanded macro"); }
    }
}
// Computes pre/post for one statement.  Local declarations thread a
// running `prev_pp` through successive initializers so later inits can
// rely on earlier ones; expression statements just inherit the
// expression's annotation.
fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
    debug!("stmt = %s", stmt_to_str(s));
    match s.node {
      stmt_decl(adecl, id) {
        match adecl.node {
          decl_local(alocals) {
            let prev_pp = empty_pre_post(num_constraints(fcx.enclosing));
            for alocals.each |alocal| {
                match alocal.node.init {
                  some(an_init) {
                    /* LHS always becomes initialized,
                     whether or not this is a move */
                    find_pre_post_expr(fcx, an_init.expr);
                    do pat_bindings(fcx.ccx.tcx.def_map, alocal.node.pat)
                        |p_id, _s, _n| {
                        copy_pre_post(fcx.ccx, p_id, an_init.expr);
                    };
                    /* Inherit ann from initializer, and add var being
                       initialized to the postcondition */
                    copy_pre_post(fcx.ccx, id, an_init.expr);
                    // If the initializer is a plain path, remember it so
                    // constraints can be rewritten to the new binding.
                    let mut p = none;
                    match an_init.expr.node {
                      expr_path(_p) { p = some(_p); }
                      _ { }
                    }
                    do pat_bindings(fcx.ccx.tcx.def_map, alocal.node.pat)
                        |p_id, _s, n| {
                        let ident = path_to_ident(n);
                        match p {
                          some(p) {
                            copy_in_postcond(fcx, id,
                                             {ident: ident, node: p_id},
                                             {ident:
                                                  path_to_ident(p),
                                              node: an_init.expr.id},
                                             op_to_oper_ty(an_init.op));
                          }
                          none { }
                        }
                    };
                    /* Clear out anything that the previous initializer
                       guaranteed */
                    let e_pp = expr_pp(fcx.ccx, an_init.expr);
                    prev_pp.precondition.become(
                        seq_preconds(fcx, ~[prev_pp, e_pp]));
                    /* Include the LHSs too, since those aren't in the
                       postconds of the RHSs themselves */
                    copy_pre_post_(fcx.ccx, id, prev_pp.precondition,
                                   prev_pp.postcondition);
                  }
                  none {
                    // Uninitialized local: no conditions on the pattern
                    // bindings or the declaration itself.
                    do pat_bindings(fcx.ccx.tcx.def_map, alocal.node.pat)
                        |p_id, _s, _n| {
                        clear_pp(node_id_to_ts_ann(fcx.ccx, p_id).conditions);
                    };
                    clear_pp(node_id_to_ts_ann(fcx.ccx, id).conditions);
                  }
                }
            }
          }
          decl_item(anitem) {
            clear_pp(node_id_to_ts_ann(fcx.ccx, id).conditions);
            find_pre_post_item(fcx.ccx, *anitem);
          }
        }
      }
      stmt_expr(e, id) | stmt_semi(e, id) {
        find_pre_post_expr(fcx, e);
        copy_pre_post(fcx.ccx, id, e);
      }
    }
}
// Computes pre/post for a block: sequence the statements' preconditions,
// then combine the postconditions — unless the block can be exited
// nonlocally, in which case its postcondition promises nothing.
fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
    /* Want to say that if there is a break or cont in this
     block, then that invalidates the poststate upheld by
    any of the stmts after it.
    Given that the typechecker has run, we know any break will be in
      a block that forms a loop body. So that's ok. There'll never be an
    expr_break outside a loop body, therefore, no expr_break outside a block.
    */
    /* Conservative approximation for now: This says that if a block contains
     *any* breaks or conts, then its postcondition doesn't promise anything.
     This will mean that:
     x = 0;
     break;
     won't have a postcondition that says x is initialized, but that's ok.
     */
    let nv = num_constraints(fcx.enclosing);
    fn do_one_(fcx: fn_ctxt, s: @stmt) {
        find_pre_post_stmt(fcx, *s);
    }
    for b.node.stmts.each |s| { do_one_(fcx, s); }
    fn do_inner_(fcx: fn_ctxt, &&e: @expr) { find_pre_post_expr(fcx, e); }
    let do_inner = |a| do_inner_(fcx, a);
    // Annotate the optional tail expression too.
    option::map::<@expr, ()>(b.node.expr, do_inner);
    let mut pps: ~[pre_and_post] = ~[];
    for b.node.stmts.each |s| { vec::push(pps, stmt_pp(fcx.ccx, *s)); }
    match b.node.expr {
      none {/* no-op */ }
      some(e) { vec::push(pps, expr_pp(fcx.ccx, e)); }
    }
    let block_precond = seq_preconds(fcx, pps);
    let mut postconds = ~[];
    for pps.each |pp| { vec::push(postconds, get_post(pp)); }
    /* A block may be empty, so this next line ensures that the postconds
       vector is non-empty. */
    vec::push(postconds, block_precond);
    let mut block_postcond = empty_poststate(nv);
    /* conservative approximation */
    if !has_nonlocal_exits(b) {
        block_postcond = seq_postconds(fcx, postconds);
    }
    set_pre_and_post(fcx.ccx, b.node.id, block_precond, block_postcond);
}
fn find_pre_post_fn(fcx: fn_ctxt, body: blk) {
find_pre_post_block(fcx, body);
// Treat the tail expression as a return statement
match body.node.expr {
some(tailexpr) { set_postcond_false(fcx.ccx, tailexpr.id); }
none {/* fallthrough */ }
}
}
// Visitor entry point for the pre/post pass: recurse into nested items
// first, then analyze this fn unless mk_fn_info marked it as ignorable
// (i.e. it contains no constrained calls).
fn fn_pre_post(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span,
               id: node_id,
               ccx: crate_ctxt, v: visit::vt<crate_ctxt>) {
    visit::visit_fn(fk, decl, body, sp, id, ccx, v);
    assert (ccx.fm.contains_key(id));
    if !ccx.fm.get(id).ignore {
        let fcx =
            {enclosing: ccx.fm.get(id),
             id: id,
             name: visit::name_of_fn(fk),
             ccx: ccx};
        find_pre_post_fn(fcx, body);
    }
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -0,0 +1,623 @@
import ann::*;
import aux::*;
import tritv::*;
import syntax::print::pprust::block_to_str;
import bitvectors::*;
import pat_util::*;
import syntax::ast::*;
import syntax::ast_util::*;
import syntax::print::pprust::{expr_to_str, stmt_to_str};
import syntax::codemap::span;
import middle::ty::{expr_ty, type_is_bot};
import util::common::{field_exprs, has_nonlocal_exits, may_break};
import driver::session::session;
import std::map::hashmap;
// Rejects moving out of an upvar (a variable captured from an enclosing
// scope): deinitializing it would invalidate the outer scope's state.
// Only `oper_move` is checked; all other operations are allowed.
fn forbid_upvar(fcx: fn_ctxt, rhs_id: node_id, sp: span, t: oper_type) {
    match t {
      oper_move {
        match local_node_id_to_def(fcx, rhs_id) {
          some(def_upvar(_, _, _)) {
            fcx.ccx.tcx.sess.span_err(sp,
                                      ~"tried to deinitialize a variable \
              declared in a different scope");
          }
          _ { }
        }
      }
      _ {/* do nothing */ }
    }
}
// Poststate bookkeeping for `let dest = rhs_path` (move or copy): when
// the rhs is a local variable and the destination is a local, rewrite
// constraints mentioning the rhs onto the destination in <post>.
// Also forbids moving out of an upvar.
fn handle_move_or_copy(fcx: fn_ctxt, post: poststate, rhs_path: @path,
                       rhs_id: node_id, destlhs: dest, init_op: init_op) {
    forbid_upvar(fcx, rhs_id, rhs_path.span, op_to_oper_ty(init_op));
    let rhs_d_id = local_node_id_to_def_id(fcx, rhs_id);
    match rhs_d_id {
      some(rhsid) {
        // RHS is a local var
        let instrhs =
            {ident: path_to_ident(rhs_path), node: rhsid.node};
        match destlhs {
          local_dest(instlhs) {
            copy_in_poststate(fcx, post, instlhs, instrhs,
                              op_to_oper_ty(init_op));
          }
          _ {}
        }
      }
      _ {
        // not a local -- do nothing
      }
    }
}
// Threads the prestate <pres> through a sequence of let-bindings, in
// order: each initializer is analyzed against the poststate of the
// previous one.  Returns whether anything changed plus the final state.
fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: ~[binding]) ->
   {changed: bool, post: poststate} {
    let mut changed = false;
    // Clone: `post` is mutated as we go; `pres` must stay intact.
    let mut post = pres.clone();
    for bindings.each |b| {
        match b.rhs {
          some(an_init) {
            // an expression, with or without a destination
            // NOTE(review): the trailing `|| changed` is redundant
            // (x |= y || x  ==  x |= y) but harmless.
            changed |=
                find_pre_post_state_expr(fcx, post, an_init.expr) || changed;
            post = expr_poststate(fcx.ccx, an_init.expr).clone();
            for b.lhs.each |d| {
                match an_init.expr.node {
                  expr_path(p) {
                    handle_move_or_copy(fcx, post, p, an_init.expr.id, d,
                                        an_init.op);
                  }
                  _ { }
                }
            }
            // Forget the RHS if we just moved it.
            if an_init.op == init_move {
                forget_in_poststate(fcx, post, an_init.expr.id);
            }
          }
          none {
          }
        }
    }
    return {changed: changed, post: post};
}
// State propagation for a node with a single subexpression: parent
// prestate = <pres>; parent poststate = subexpression's poststate, with
// constraint <c> (if any) additionally set — used by `check` exprs.
// Returns true if any annotation changed (fixed-point signal).
fn find_pre_post_state_sub(fcx: fn_ctxt, pres: prestate, e: @expr,
                           parent: node_id, c: option<tsconstr>) -> bool {
    let mut changed = find_pre_post_state_expr(fcx, pres, e);
    changed = set_prestate_ann(fcx.ccx, parent, pres) || changed;
    let post = expr_poststate(fcx.ccx, e).clone();
    match c {
      none { }
      some(c1) { set_in_poststate_(bit_num(fcx, c1), post); }
    }
    changed = set_poststate_ann(fcx.ccx, parent, post) || changed;
    return changed;
}
// State propagation for two-operand forms (assign/move/swap/etc.):
// evaluate lhs then rhs, then update the parent's poststate according
// to the operation — forgetting moved/overwritten constraints and, for
// path-to-path updates, rewriting constraints from rhs to lhs.
fn find_pre_post_state_two(fcx: fn_ctxt, pres: prestate, lhs: @expr,
                           rhs: @expr, parent: node_id, ty: oper_type) ->
   bool {
    let mut changed = set_prestate_ann(fcx.ccx, parent, pres);
    changed = find_pre_post_state_expr(fcx, pres, lhs) || changed;
    changed =
        find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, lhs), rhs) ||
            changed;
    forbid_upvar(fcx, rhs.id, rhs.span, ty);
    let post = expr_poststate(fcx.ccx, rhs).clone();
    match lhs.node {
      expr_path(p) {
        // for termination, need to make sure intermediate changes don't set
        // changed flag
        // tmp remembers "old" constraints we'd otherwise forget,
        // for substitution purposes
        let tmp = post.clone();
        match ty {
          oper_move {
            if is_path(rhs) { forget_in_poststate(fcx, post, rhs.id); }
            forget_in_poststate(fcx, post, lhs.id);
          }
          oper_swap {
            forget_in_poststate(fcx, post, lhs.id);
            forget_in_poststate(fcx, post, rhs.id);
          }
          _ { forget_in_poststate(fcx, post, lhs.id); }
        }
        match rhs.node {
          expr_path(p1) {
            let d = local_node_id_to_local_def_id(fcx, lhs.id);
            let d1 = local_node_id_to_local_def_id(fcx, rhs.id);
            match d {
              some(id) {
                match d1 {
                  some(id1) {
                    let instlhs =
                        {ident: path_to_ident(p), node: id};
                    let instrhs =
                        {ident: path_to_ident(p1), node: id1};
                    copy_in_poststate_two(fcx, tmp, post, instlhs, instrhs,
                                          ty);
                  }
                  _ { }
                }
              }
              _ { }
            }
          }
          _ {/* do nothing */ }
        }
      }
      _ { }
    }
    changed = set_poststate_ann(fcx.ccx, parent, post) || changed;
    return changed;
}
// State propagation for a call: evaluate the callee <a>, then the
// arguments <bs> (with their init ops <ops>), honoring the callee's
// return style (a `noreturn` call sets everything in the poststate).
fn find_pre_post_state_call(fcx: fn_ctxt, pres: prestate, a: @expr,
                            id: node_id, ops: ~[init_op], bs: ~[@expr],
                            cf: ret_style) -> bool {
    let mut changed = find_pre_post_state_expr(fcx, pres, a);
    // FIXME (#2178): This could be a typestate constraint (except we're
    // not using them inside the compiler, I guess... see discussion in
    // bug)
    if vec::len(bs) != vec::len(ops) {
        fcx.ccx.tcx.sess.span_bug(a.span,
                                  fmt!("mismatched arg lengths: \
                                        %u exprs vs. %u ops",
                                       vec::len(bs), vec::len(ops)));
    }
    return find_pre_post_state_exprs(fcx, pres, id, ops,
                                     bs, cf) || changed;
}
// Sequences the state through a list of expressions (paired with their
// init ops) and records pre/poststates on node <id>.  A `noreturn`
// control flow makes the poststate vacuously false.
fn find_pre_post_state_exprs(fcx: fn_ctxt, pres: prestate, id: node_id,
                             ops: ~[init_op], es: ~[@expr],
                             cf: ret_style) -> bool {
    let rs = seq_states(fcx, pres, arg_bindings(ops, es));
    let mut changed = rs.changed | set_prestate_ann(fcx.ccx, id, pres);
    /* if this is a failing call, it sets everything as initialized */
    match cf {
      noreturn {
        let post = false_postcond(num_constraints(fcx.enclosing));
        changed |= set_poststate_ann(fcx.ccx, id, post);
      }
      _ { changed |= set_poststate_ann(fcx.ccx, id, rs.post); }
    }
    return changed;
}
// Prestate/poststate propagation for if / if-check expressions: push the
// antecedent's poststate into each branch (setting the checked
// constraint's bit true inside the consequent for `if check`), then join
// the branch poststates by intersection.
fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk,
                  maybe_alt: option<@expr>, id: node_id, chk: if_ty,
                  pres: prestate) -> bool {
    let mut changed =
        set_prestate_ann(fcx.ccx, id, pres) |
            find_pre_post_state_expr(fcx, pres, antec);
    match maybe_alt {
      none {
        match chk {
          if_check {
            let c: sp_constr = expr_to_constr(fcx.ccx.tcx, antec);
            // Clone before setting the bit so the antecedent's own
            // poststate annotation is not mutated.
            let conseq_prestate = expr_poststate(fcx.ccx, antec).clone();
            conseq_prestate.set(bit_num(fcx, c.node), ttrue);
            changed |=
                find_pre_post_state_block(fcx, conseq_prestate, conseq) |
                    set_poststate_ann(fcx.ccx, id,
                                      expr_poststate(fcx.ccx, antec));
          }
          _ {
            changed |=
                find_pre_post_state_block(fcx, expr_poststate(fcx.ccx, antec),
                                          conseq) |
                    set_poststate_ann(fcx.ccx, id,
                                      expr_poststate(fcx.ccx, antec));
          }
        }
      }
      some(altern) {
        changed |=
            find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, antec),
                                     altern);
        let mut conseq_prestate = expr_poststate(fcx.ccx, antec);
        match chk {
          if_check {
            let c: sp_constr = expr_to_constr(fcx.ccx.tcx, antec);
            conseq_prestate = conseq_prestate.clone();
            conseq_prestate.set(bit_num(fcx, c.node), ttrue);
          }
          _ { }
        }
        changed |= find_pre_post_state_block(fcx, conseq_prestate, conseq);
        // Either branch may have executed, so only their common
        // guarantees survive.
        let poststate_res =
            intersect_states(block_poststate(fcx.ccx, conseq),
                             expr_poststate(fcx.ccx, altern));
        /*
           fcx.ccx.tcx.sess.span_note(antec.span,
           "poststate_res = " + aux::tritv_to_str(fcx, poststate_res));
        fcx.ccx.tcx.sess.span_note(antec.span,
           "altern poststate = " +
            aux::tritv_to_str(fcx, expr_poststate(fcx.ccx, altern)));
        fcx.ccx.tcx.sess.span_note(antec.span,
        "conseq poststate = " + aux::tritv_to_str(fcx,
           block_poststate(fcx.ccx, conseq)));
        */
        changed |= set_poststate_ann(fcx.ccx, id, poststate_res);
      }
    }
    return changed;
}
// State propagation for a closure's capture clause: the closure
// expression itself passes the state through, except that variables
// captured by move are forgotten from the poststate.
fn find_pre_post_state_cap_clause(fcx: fn_ctxt, e_id: node_id,
                                  pres: prestate, cap_clause: capture_clause)
    -> bool
{
    let ccx = fcx.ccx;
    let pres_changed = set_prestate_ann(ccx, e_id, pres);
    let post = pres.clone();
    for (*cap_clause).each |cap_item| {
        if cap_item.is_move {
            forget_in_poststate(fcx, post, cap_item.id);
        }
    }
    return set_poststate_ann(ccx, e_id, post) || pres_changed;
}
// One step of the dataflow fixed-point: given the prestate <pres>
// flowing into expression <e>, recompute e's prestate/poststate
// annotations (and those of its subexpressions).  Returns true iff any
// annotation changed, driving the outer iteration to a fixed point.
fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
    let num_constrs = num_constraints(fcx.enclosing);
    match e.node {
      expr_new(p, _, v) {
        return find_pre_post_state_two(fcx, pres, p, v, e.id, oper_pure);
      }
      expr_vstore(ee, _) {
        // Pass the inner expression's states through unchanged.
        let mut changed = find_pre_post_state_expr(fcx, pres, ee);
        set_prestate_ann(fcx.ccx, e.id, expr_prestate(fcx.ccx, ee));
        set_poststate_ann(fcx.ccx, e.id, expr_poststate(fcx.ccx, ee));
        return changed;
      }
      expr_vec(elts, _) {
        return find_pre_post_state_exprs(fcx, pres, e.id,
                                         vec::from_elem(vec::len(elts),
                                                        init_assign), elts,
                                         return_val);
      }
      expr_call(operator, operands, _) {
        debug!("hey it's a call: %s", expr_to_str(e));
        return find_pre_post_state_call(fcx, pres, operator, e.id,
                                        callee_arg_init_ops(fcx, operator.id),
                                        operands,
                                        controlflow_expr(fcx.ccx, operator));
      }
      expr_path(_) { return pure_exp(fcx.ccx, e.id, pres); }
      expr_log(_, lvl, ex) {
        return find_pre_post_state_two(fcx, pres, lvl, ex, e.id, oper_pure);
      }
      expr_mac(_) { fcx.ccx.tcx.sess.bug(~"unexpanded macro"); }
      expr_lit(l) { return pure_exp(fcx.ccx, e.id, pres); }
      expr_fn(_, _, _, cap_clause) {
        return find_pre_post_state_cap_clause(fcx, e.id, pres, cap_clause);
      }
      expr_fn_block(_, _, cap_clause) {
        return find_pre_post_state_cap_clause(fcx, e.id, pres, cap_clause);
      }
      expr_block(b) {
        return find_pre_post_state_block(fcx, pres, b) |
            set_prestate_ann(fcx.ccx, e.id, pres) |
            set_poststate_ann(fcx.ccx, e.id, block_poststate(fcx.ccx, b));
      }
      expr_rec(fields, maybe_base) {
        let exs = field_exprs(fields);
        let mut changed =
            find_pre_post_state_exprs(fcx, pres, e.id,
                                      vec::from_elem(vec::len(fields),
                                                     init_assign),
                                      exs, return_val);
        // A functional-record-update base is evaluated after the fields.
        let base_pres = match vec::last_opt(exs) { none { pres }
          some(f) { expr_poststate(fcx.ccx, f) }};
        option::iter(maybe_base, |base| {
            changed |= find_pre_post_state_expr(fcx, base_pres, base) |
                set_poststate_ann(fcx.ccx, e.id,
                                  expr_poststate(fcx.ccx, base))
        });
        return changed;
      }
      expr_tup(elts) {
        return find_pre_post_state_exprs(fcx, pres, e.id,
                                         vec::from_elem(vec::len(elts),
                                                        init_assign), elts,
                                         return_val);
      }
      expr_move(lhs, rhs) {
        return find_pre_post_state_two(fcx, pres, lhs, rhs, e.id, oper_move);
      }
      expr_assign(lhs, rhs) {
        return find_pre_post_state_two(
            fcx, pres, lhs, rhs, e.id, oper_assign);
      }
      expr_swap(lhs, rhs) {
        return find_pre_post_state_two(fcx, pres, lhs, rhs, e.id, oper_swap);
        // Could be more precise and actually swap the role of
        // lhs and rhs in constraints
      }
      expr_ret(maybe_ret_val) {
        let mut changed = set_prestate_ann(fcx.ccx, e.id, pres);
        /* everything is true if execution continues after
           a return expression (since execution never continues locally
           after a return expression */
        let post = false_postcond(num_constrs);
        set_poststate_ann(fcx.ccx, e.id, post);
        match maybe_ret_val {
          none {/* do nothing */ }
          some(ret_val) {
            changed |= find_pre_post_state_expr(fcx, pres, ret_val);
          }
        }
        return changed;
      }
      expr_if(antec, conseq, maybe_alt) {
        return join_then_else(fcx, antec, conseq, maybe_alt, e.id, plain_if,
                              pres);
      }
      expr_binary(bop, l, r) {
        if lazy_binop(bop) {
            // && / ||: r may not run, so the whole expression only
            // guarantees l's poststate.
            let mut changed = find_pre_post_state_expr(fcx, pres, l);
            changed |=
                find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, l), r);
            return changed | set_prestate_ann(fcx.ccx, e.id, pres) |
                set_poststate_ann(fcx.ccx, e.id,
                                  expr_poststate(fcx.ccx, l));
        } else {
            return find_pre_post_state_two(fcx, pres, l, r, e.id, oper_pure);
        }
      }
      expr_assign_op(op, lhs, rhs) {
        return find_pre_post_state_two(fcx, pres, lhs, rhs, e.id,
                                       oper_assign_op);
      }
      expr_while(test, body) {
        // Loop prestate: what held on entry intersected with what the
        // body guaranteed on the previous fixed-point iteration.
        let loop_pres =
            intersect_states(block_poststate(fcx.ccx, body), pres);
        let mut changed =
            set_prestate_ann(fcx.ccx, e.id, loop_pres) |
                find_pre_post_state_expr(fcx, loop_pres, test) |
                find_pre_post_state_block(fcx, expr_poststate(fcx.ccx, test),
                                          body);
        /* conservative approximation: if a loop contains a break
           or cont, we assume nothing about the poststate */
        /* which is still unsound -- see ~[Break-unsound] */
        if has_nonlocal_exits(body) {
            return changed | set_poststate_ann(fcx.ccx, e.id, pres);
        } else {
            let e_post = expr_poststate(fcx.ccx, test);
            let b_post = block_poststate(fcx.ccx, body);
            return changed |
                set_poststate_ann(fcx.ccx, e.id,
                                  intersect_states(e_post, b_post));
        }
      }
      expr_loop(body) {
        let loop_pres =
            intersect_states(block_poststate(fcx.ccx, body), pres);
        let mut changed = set_prestate_ann(fcx.ccx, e.id, loop_pres)
            | find_pre_post_state_block(fcx, loop_pres, body);
        /* conservative approximation: if a loop contains a break
           or cont, we assume nothing about the poststate (so, we
           set all predicates to "don't know" */
        /* which is still unsound -- see ~[Break-unsound] */
        if may_break(body) {
            /* Only do this if there are *breaks* not conts.
             An infinite loop with conts is still an infinite loop.
             We assume all preds are FALSE, not '?' -- because in the
             worst case, the body could invalidate all preds and
             deinitialize everything before breaking */
            let post = empty_poststate(num_constrs);
            post.kill();
            return changed | set_poststate_ann(fcx.ccx, e.id, post);
        } else {
            return changed | set_poststate_ann(fcx.ccx, e.id,
                                               false_postcond(num_constrs));
        }
      }
      expr_index(val, sub) {
        return find_pre_post_state_two(fcx, pres, val, sub, e.id, oper_pure);
      }
      expr_match(val, alts, _) {
        let mut changed =
            set_prestate_ann(fcx.ccx, e.id, pres) |
                find_pre_post_state_expr(fcx, pres, val);
        let e_post = expr_poststate(fcx.ccx, val);
        let mut a_post;
        if vec::len(alts) > 0u {
            // Join arm poststates by intersection, starting from
            // "everything true" (false_postcond) as the identity.
            a_post = false_postcond(num_constrs);
            for alts.each |an_alt| {
                match an_alt.guard {
                  some(e) {
                    changed |= find_pre_post_state_expr(fcx, e_post, e);
                  }
                  _ {}
                }
                changed |=
                    find_pre_post_state_block(fcx, e_post, an_alt.body);
                intersect(a_post, block_poststate(fcx.ccx, an_alt.body));
                // We deliberately do *not* update changed here, because
                // we'd go into an infinite loop that way, and the change
                // gets made after the if expression.
            }
        } else {
            // No alts; poststate is the poststate of the test
            a_post = e_post;
        }
        return changed | set_poststate_ann(fcx.ccx, e.id, a_post);
      }
      expr_field(x, _, _) | expr_loop_body(x) | expr_do_body(x) |
      expr_unary(_, x) |
      expr_addr_of(_, x) | expr_assert(x) | expr_cast(x, _) |
      expr_copy(x) {
        // Single-subexpression forms: delegate with no extra constraint.
        return find_pre_post_state_sub(fcx, pres, x, e.id, none);
      }
      expr_fail(maybe_fail_val) {
        /* if execution continues after fail, then everything is true!
           woo! */
        let post = false_postcond(num_constrs);
        return set_prestate_ann(fcx.ccx, e.id, pres) |
            set_poststate_ann(fcx.ccx, e.id, post) |
            option::map_default(
                maybe_fail_val, false,
                |fail_val|
                find_pre_post_state_expr(fcx, pres, fail_val) );
      }
      expr_check(_, p) {
        /* predicate p holds after this expression executes */
        let c: sp_constr = expr_to_constr(fcx.ccx.tcx, p);
        return find_pre_post_state_sub(fcx, pres, p, e.id, some(c.node));
      }
      expr_if_check(p, conseq, maybe_alt) {
        return join_then_else(
            fcx, p, conseq, maybe_alt, e.id, if_check, pres);
      }
      expr_break { return pure_exp(fcx.ccx, e.id, pres); }
      expr_again { return pure_exp(fcx.ccx, e.id, pres); }
    }
}
// One statement's worth of the dataflow fixed-point: propagate the
// incoming prestate <pres> through statement <s>, updating its
// annotation.  Returns true iff the annotation changed.
//
// Fix: the second "At beginning" debug line was labelled *poststate but
// printed the prestate again (copy-paste slip; compare the correctly
// paired Summary lines below).  It now prints the poststate.
fn find_pre_post_state_stmt(fcx: fn_ctxt, pres: prestate, s: @stmt) -> bool {
    let stmt_ann = stmt_to_ann(fcx.ccx, *s);
    debug!("[ %s ]", *fcx.name);
    debug!("*At beginning: stmt = %s", stmt_to_str(*s));
    debug!("*prestate = %s", stmt_ann.states.prestate.to_str());
    debug!("*poststate = %s", stmt_ann.states.poststate.to_str());
    match s.node {
      stmt_decl(adecl, id) {
        match adecl.node {
          decl_local(alocals) {
            set_prestate(stmt_ann, pres);
            // Thread the state through the initializers in order.
            let c_and_p = seq_states(fcx, pres,
                                     locals_to_bindings(fcx.ccx.tcx, alocals));
            /* important to do this in one step to ensure
               termination (don't want to set changed to true
               for intermediate changes) */
            let mut changed =
                set_poststate(stmt_ann, c_and_p.post) | c_and_p.changed;
            debug!("Summary: stmt = %s", stmt_to_str(*s));
            debug!("prestate = %s", stmt_ann.states.prestate.to_str());
            debug!("poststate = %s", stmt_ann.states.poststate.to_str());
            debug!("changed = %s", bool::to_str(changed));
            return changed;
          }
          decl_item(an_item) {
            // An item declaration passes the state straight through.
            return set_prestate(stmt_ann, pres)
                | set_poststate(stmt_ann, pres);
            /* the outer visitor will recurse into the item */
          }
        }
      }
      stmt_expr(ex, _) | stmt_semi(ex, _) {
        let mut changed =
            find_pre_post_state_expr(fcx, pres, ex) |
                set_prestate(stmt_ann, expr_prestate(fcx.ccx, ex)) |
                set_poststate(stmt_ann, expr_poststate(fcx.ccx, ex));
        debug!("Finally: %s", stmt_to_str(*s));
        debug!("prestate = %s", stmt_ann.states.prestate.to_str());
        debug!("poststate = %s", stmt_ann.states.poststate.to_str());
        debug!("changed = %s", bool::to_str(changed));
        return changed;
      }
    }
}
/* Updates the pre- and post-states of statements in the block,
returns a boolean flag saying whether any pre- or poststates changed */
fn find_pre_post_state_block(fcx: fn_ctxt, pres0: prestate, b: blk) -> bool {
/* First, set the pre-states and post-states for every expression */
let mut pres = pres0;
/* Iterate over each stmt. The new prestate is <pres>. The poststate
consist of improving <pres> with whatever variables this stmt
initializes. Then <pres> becomes the new poststate. */
let mut changed = false;
for b.node.stmts.each |s| {
changed |= find_pre_post_state_stmt(fcx, pres, s);
pres = stmt_poststate(fcx.ccx, *s);
}
let mut post = pres;
match b.node.expr {
none { }
some(e) {
changed |= find_pre_post_state_expr(fcx, pres, e);
post = expr_poststate(fcx.ccx, e);
}
}
set_prestate_ann(fcx.ccx, b.node.id, pres0);
set_poststate_ann(fcx.ccx, b.node.id, post);
return changed;
}
// Runs one pass of the typestate dataflow over a whole function body.
// Returns true if any pre-/post-state changed, so the caller can iterate
// to a fixpoint.
fn find_pre_post_state_fn(fcx: fn_ctxt,
                          f_decl: fn_decl,
                          f_body: blk) -> bool {
    // All constraints are considered false until proven otherwise.
    // This ensures that intersect works correctly.
    kill_all_prestate(fcx, f_body.node.id);

    // Seed the body's entry state with any constraints declared on the
    // function's arguments, so they can be assumed throughout.
    let entry_state = block_prestate(fcx.ccx, f_body);
    for f_decl.constraints.each |constr| {
        let ts_constr =
            ast_constr_to_ts_constr(fcx.ccx.tcx, f_decl.inputs, constr);
        set_in_prestate_constr(fcx, ts_constr, entry_state);
    }

    return find_pre_post_state_block(fcx, entry_state, f_body);
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -521,17 +521,17 @@ trait vid {
impl tv_vid: vid {
pure fn to_uint() -> uint { *self }
pure fn to_str() -> ~str { fmt!{"<V%u>", self.to_uint()} }
pure fn to_str() -> ~str { fmt!("<V%u>", self.to_uint()) }
}
impl tvi_vid: vid {
pure fn to_uint() -> uint { *self }
pure fn to_str() -> ~str { fmt!{"<VI%u>", self.to_uint()} }
pure fn to_str() -> ~str { fmt!("<VI%u>", self.to_uint()) }
}
impl region_vid: vid {
pure fn to_uint() -> uint { *self }
pure fn to_str() -> ~str { fmt!{"%?", self} }
pure fn to_str() -> ~str { fmt!("%?", self) }
}
trait purity_to_str {
@ -1135,23 +1135,23 @@ fn substs_is_noop(substs: &substs) -> bool {
}
fn substs_to_str(cx: ctxt, substs: &substs) -> ~str {
fmt!{"substs(self_r=%s, self_ty=%s, tps=%?)",
fmt!("substs(self_r=%s, self_ty=%s, tps=%?)",
substs.self_r.map_default(~"none", |r| region_to_str(cx, r)),
substs.self_ty.map_default(~"none", |t| ty_to_str(cx, t)),
substs.tps.map(|t| ty_to_str(cx, t))}
substs.tps.map(|t| ty_to_str(cx, t)))
}
fn subst(cx: ctxt,
substs: &substs,
typ: t) -> t {
debug!{"subst(substs=%s, typ=%s)",
debug!("subst(substs=%s, typ=%s)",
substs_to_str(cx, substs),
ty_to_str(cx, typ)};
ty_to_str(cx, typ));
if substs_is_noop(substs) { return typ; }
let r = do_subst(cx, substs, typ);
debug!{" r = %s", ty_to_str(cx, r)};
debug!(" r = %s", ty_to_str(cx, r));
return r;
fn do_subst(cx: ctxt,
@ -1880,27 +1880,27 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
fn type_requires(cx: ctxt, seen: @mut ~[def_id],
r_ty: t, ty: t) -> bool {
debug!{"type_requires(%s, %s)?",
debug!("type_requires(%s, %s)?",
ty_to_str(cx, r_ty),
ty_to_str(cx, ty)};
ty_to_str(cx, ty));
let r = {
get(r_ty).struct == get(ty).struct ||
subtypes_require(cx, seen, r_ty, ty)
};
debug!{"type_requires(%s, %s)? %b",
debug!("type_requires(%s, %s)? %b",
ty_to_str(cx, r_ty),
ty_to_str(cx, ty),
r};
r);
return r;
}
fn subtypes_require(cx: ctxt, seen: @mut ~[def_id],
r_ty: t, ty: t) -> bool {
debug!{"subtypes_require(%s, %s)?",
debug!("subtypes_require(%s, %s)?",
ty_to_str(cx, r_ty),
ty_to_str(cx, ty)};
ty_to_str(cx, ty));
let r = match get(ty).struct {
ty_nil |
@ -1976,10 +1976,10 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
}
};
debug!{"subtypes_require(%s, %s)? %b",
debug!("subtypes_require(%s, %s)? %b",
ty_to_str(cx, r_ty),
ty_to_str(cx, ty),
r};
r);
return r;
}
@ -1991,7 +1991,7 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
fn type_structurally_contains(cx: ctxt, ty: t, test: fn(x: &sty) -> bool) ->
bool {
let sty = &get(ty).struct;
debug!{"type_structurally_contains: %s", ty_to_str(cx, ty)};
debug!("type_structurally_contains: %s", ty_to_str(cx, ty));
if test(sty) { return true; }
match *sty {
ty_enum(did, ref substs) => {
@ -2326,9 +2326,9 @@ fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t {
match smallintmap::find(*cx.node_types, id as uint) {
some(t) => t,
none => cx.sess.bug(
fmt!{"node_id_to_type: unbound node ID %s",
fmt!("node_id_to_type: unbound node ID %s",
ast_map::node_id_to_str(cx.items, id,
cx.sess.parse_sess.interner)})
cx.sess.parse_sess.interner)))
}
}
@ -2382,7 +2382,7 @@ fn is_fn_ty(fty: t) -> bool {
fn ty_region(ty: t) -> region {
match get(ty).struct {
ty_rptr(r, _) => r,
s => fail fmt!{"ty_region() invoked on non-rptr: %?", s}
s => fail fmt!("ty_region() invoked on non-rptr: %?", s)
}
}
@ -2400,15 +2400,15 @@ fn is_pred_ty(fty: t) -> bool {
fn ty_var_id(typ: t) -> tv_vid {
match get(typ).struct {
ty_var(vid) => return vid,
_ => { error!{"ty_var_id called on non-var ty"}; fail; }
_ => { error!("ty_var_id called on non-var ty"); fail; }
}
}
fn ty_var_integral_id(typ: t) -> tvi_vid {
match get(typ).struct {
ty_var_integral(vid) => return vid,
_ => { error!{"ty_var_integral_id called on ty other than \
ty_var_integral"};
_ => { error!("ty_var_integral_id called on ty other than \
ty_var_integral");
fail; }
}
}
@ -2591,7 +2591,7 @@ fn canon_mode(cx: ctxt, m0: ast::mode) -> ast::mode {
fn resolved_mode(cx: ctxt, m: ast::mode) -> ast::rmode {
match canon_mode(cx, m) {
ast::infer(_) => {
cx.sess.bug(fmt!{"mode %? was never resolved", m});
cx.sess.bug(fmt!("mode %? was never resolved", m));
}
ast::expl(m0) => m0
}
@ -2642,7 +2642,7 @@ fn ty_sort_str(cx: ctxt, t: t) -> ~str {
ty_to_str(cx, t)
}
ty_enum(id, _) => fmt!{"enum %s", item_path_str(cx, id)},
ty_enum(id, _) => fmt!("enum %s", item_path_str(cx, id)),
ty_box(_) => ~"@-ptr",
ty_uniq(_) => ~"~-ptr",
ty_evec(_, _) => ~"vector",
@ -2651,8 +2651,8 @@ fn ty_sort_str(cx: ctxt, t: t) -> ~str {
ty_rptr(_, _) => ~"&-ptr",
ty_rec(_) => ~"record",
ty_fn(_) => ~"fn",
ty_trait(id, _, _) => fmt!{"trait %s", item_path_str(cx, id)},
ty_class(id, _) => fmt!{"class %s", item_path_str(cx, id)},
ty_trait(id, _, _) => fmt!("trait %s", item_path_str(cx, id)),
ty_class(id, _) => fmt!("class %s", item_path_str(cx, id)),
ty_tup(_) => ~"tuple",
ty_var(_) => ~"variable",
ty_var_integral(_) => ~"integral variable",
@ -2685,14 +2685,14 @@ fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
to_str(values.expected))
}
terr_purity_mismatch(values) => {
fmt!{"expected %s fn but found %s fn",
fmt!("expected %s fn but found %s fn",
purity_to_str(values.expected),
purity_to_str(values.found)}
purity_to_str(values.found))
}
terr_proto_mismatch(values) => {
fmt!{"expected %s closure, found %s closure",
fmt!("expected %s closure, found %s closure",
proto_ty_to_str(cx, values.expected),
proto_ty_to_str(cx, values.found)}
proto_ty_to_str(cx, values.found))
}
terr_mutability => ~"values differ in mutability",
terr_box_mutability => ~"boxed values differ in mutability",
@ -2728,14 +2728,14 @@ fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
mode_to_str(values.expected), mode_to_str(values.found))
}
terr_regions_does_not_outlive(subregion, superregion) => {
fmt!{"%s does not necessarily outlive %s",
fmt!("%s does not necessarily outlive %s",
explain_region(cx, superregion),
explain_region(cx, subregion)}
explain_region(cx, subregion))
}
terr_regions_not_same(region1, region2) => {
fmt!{"%s is not the same as %s",
fmt!("%s is not the same as %s",
explain_region(cx, region1),
explain_region(cx, region2)}
explain_region(cx, region2))
}
terr_regions_no_overlap(region1, region2) => {
fmt!("%s does not intersect %s",
@ -2753,9 +2753,9 @@ fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
type_err_to_str(cx, err))
}
terr_sorts(values) => {
fmt!{"expected %s but found %s",
fmt!("expected %s but found %s",
ty_sort_str(cx, values.expected),
ty_sort_str(cx, values.found)}
ty_sort_str(cx, values.found))
}
terr_self_substs => {
~"inconsistent self substitution" // XXX this is more of a bug
@ -2800,7 +2800,7 @@ fn trait_methods(cx: ctxt, id: ast::def_id) -> @~[method] {
fn impl_traits(cx: ctxt, id: ast::def_id) -> ~[t] {
if id.crate == ast::local_crate {
debug!{"(impl_traits) searching for trait impl %?", id};
debug!("(impl_traits) searching for trait impl %?", id);
match cx.items.find(id.node) {
some(ast_map::node_item(@{
node: ast::item_impl(_, trait_refs, _, _),
@ -2816,12 +2816,12 @@ fn impl_traits(cx: ctxt, id: ast::def_id) -> ~[t] {
match cx.def_map.find(id.node) {
some(def_ty(trait_id)) => {
// XXX: Doesn't work cross-crate.
debug!{"(impl_traits) found trait id %?", trait_id};
debug!("(impl_traits) found trait id %?", trait_id);
~[node_id_to_type(cx, trait_id.node)]
}
some(x) => {
cx.sess.bug(fmt!{"impl_traits: trait ref is in trait map \
but is bound to %?", x});
cx.sess.bug(fmt!("impl_traits: trait ref is in trait map \
but is bound to %?", x));
}
none => {
~[]
@ -2932,7 +2932,7 @@ fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path {
ast_map::node_stmt(*) | ast_map::node_expr(*) |
ast_map::node_arg(*) | ast_map::node_local(*) |
ast_map::node_export(*) | ast_map::node_block(*) => {
cx.sess.bug(fmt!{"cannot find item_path for node %?", node});
cx.sess.bug(fmt!("cannot find item_path for node %?", node));
}
}
}
@ -3080,9 +3080,9 @@ fn lookup_class_fields(cx: ctxt, did: ast::def_id) -> ~[field_ty] {
}
_ => {
cx.sess.bug(
fmt!{"class ID not bound to an item: %s",
fmt!("class ID not bound to an item: %s",
ast_map::node_id_to_str(cx.items, did.node,
cx.sess.parse_sess.interner)});
cx.sess.parse_sess.interner)));
}
}
}
@ -3147,8 +3147,8 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident,
return ast_util::local_def(m.id);
}
}
cx.sess.span_fatal(sp, fmt!{"Class doesn't have a method \
named %s", cx.sess.str_of(name)});
cx.sess.span_fatal(sp, fmt!("Class doesn't have a method \
named %s", cx.sess.str_of(name)));
}
else {
csearch::get_class_method(cx.sess.cstore, did, name)

View file

@ -175,7 +175,7 @@ enum crate_ctxt {
// Functions that write types into the node type table
fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
debug!{"write_ty_to_tcx(%d, %s)", node_id, ty_to_str(tcx, ty)};
debug!("write_ty_to_tcx(%d, %s)", node_id, ty_to_str(tcx, ty));
smallintmap::insert(*tcx.node_types, node_id as uint, ty);
}
fn write_substs_to_tcx(tcx: ty::ctxt,
@ -275,10 +275,10 @@ fn check_main_fn_ty(ccx: @crate_ctxt,
arg_is_argv_ty(tcx, inputs[0]);
if !ok {
tcx.sess.span_err(main_span,
fmt!{"Wrong type in main function: found `%s`, \
fmt!("Wrong type in main function: found `%s`, \
expected `extern fn(~[str]) -> ()` \
or `extern fn() -> ()`",
ty_to_str(tcx, main_t)});
ty_to_str(tcx, main_t)));
}
}
_ => {

View file

@ -87,8 +87,8 @@ fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope copy owned>(
let {bounds: decl_bounds, region_param: decl_rp, ty: decl_ty} =
self.get_item_ty(did);
debug!["ast_path_to_substs_and_ty: did=%? decl_rp=%?",
did, decl_rp];
debug!("ast_path_to_substs_and_ty: did=%? decl_rp=%?",
did, decl_rp);
// If the type is parameterized by the self region, then replace self
// region with the current anon region binding (in other words,
@ -100,9 +100,9 @@ fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope copy owned>(
(none, some(_)) => {
tcx.sess.span_err(
path.span,
fmt!{"no region bound is allowed on `%s`, \
fmt!("no region bound is allowed on `%s`, \
which is not declared as containing region pointers",
ty::item_path_str(tcx, did)});
ty::item_path_str(tcx, did)));
none
}
(some(_), none) => {
@ -119,8 +119,8 @@ fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope copy owned>(
if !vec::same_length(*decl_bounds, path.types) {
self.tcx().sess.span_fatal(
path.span,
fmt!{"wrong number of type arguments: expected %u but found %u",
(*decl_bounds).len(), path.types.len()});
fmt!("wrong number of type arguments: expected %u but found %u",
(*decl_bounds).len(), path.types.len()));
}
let tps = path.types.map(|a_t| ast_ty_to_ty(self, rscope, a_t));
@ -310,8 +310,8 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope copy owned>(
ast::ty_path(path, id) => {
let a_def = match tcx.def_map.find(id) {
none => tcx.sess.span_fatal(
ast_ty.span, fmt!{"unbound path %s",
path_to_str(path, tcx.sess.intr())}),
ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))),
some(d) => d
};
match a_def {
@ -368,8 +368,8 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope copy owned>(
|ty| {
tcx.sess.span_err(
a_t.span,
fmt!{"bound not allowed on a %s",
ty::ty_sort_str(tcx, ty.ty)});
fmt!("bound not allowed on a %s",
ty::ty_sort_str(tcx, ty.ty)));
ty.ty
})
}
@ -470,7 +470,7 @@ fn ty_of_fn_decl<AC: ast_conv, RS: region_scope copy owned>(
expected_tys: expected_tys,
span: span) -> ty::fn_ty {
debug!{"ty_of_fn_decl"};
debug!("ty_of_fn_decl");
do indent {
// new region names that appear inside of the fn decl are bound to
// that function type

View file

@ -209,10 +209,10 @@ fn check_fn(ccx: @crate_ctxt,
let arg_tys = fn_ty.inputs.map(|a| a.ty);
let ret_ty = fn_ty.output;
debug!{"check_fn(arg_tys=%?, ret_ty=%?, self_info.self_ty=%?)",
debug!("check_fn(arg_tys=%?, ret_ty=%?, self_info.self_ty=%?)",
arg_tys.map(|a| ty_to_str(tcx, a)),
ty_to_str(tcx, ret_ty),
option::map(self_info, |s| ty_to_str(tcx, s.self_ty))};
option::map(self_info, |s| ty_to_str(tcx, s.self_ty)));
// ______________________________________________________________________
// Create the function context. This is either derived from scratch or,
@ -329,16 +329,16 @@ fn check_fn(ccx: @crate_ctxt,
for self_info.each |info| {
assign(info.explicit_self.span,
info.self_id, some(info.self_ty));
debug!{"self is assigned to %s",
fcx.locals.get(info.self_id).to_str()};
debug!("self is assigned to %s",
fcx.locals.get(info.self_id).to_str());
}
// Add formal parameters.
do vec::iter2(arg_tys, decl.inputs) |arg_ty, input| {
assign(input.ty.span, input.id, some(arg_ty));
debug!{"Argument %s is assigned to %s",
debug!("Argument %s is assigned to %s",
tcx.sess.str_of(input.ident),
fcx.locals.get(input.id).to_str()};
fcx.locals.get(input.id).to_str());
}
// Add explicitly-declared locals.
@ -349,9 +349,9 @@ fn check_fn(ccx: @crate_ctxt,
_ => some(fcx.to_ty(local.node.ty))
};
assign(local.span, local.node.id, o_ty);
debug!{"Local variable %s is assigned to %s",
debug!("Local variable %s is assigned to %s",
pat_to_str(local.node.pat, tcx.sess.intr()),
fcx.locals.get(local.node.id).to_str()};
fcx.locals.get(local.node.id).to_str());
visit::visit_local(local, e, v);
};
@ -361,9 +361,9 @@ fn check_fn(ccx: @crate_ctxt,
ast::pat_ident(_, path, _)
if !pat_util::pat_is_variant(fcx.ccx.tcx.def_map, p) => {
assign(p.span, p.id, none);
debug!{"Pattern binding %s is assigned to %s",
debug!("Pattern binding %s is assigned to %s",
tcx.sess.str_of(path.idents[0]),
fcx.locals.get(p.id).to_str()};
fcx.locals.get(p.id).to_str());
}
_ => {}
}
@ -414,9 +414,9 @@ fn check_no_duplicate_fields(tcx: ty::ctxt, fields:
let (id, sp) = p;
match field_names.find(id) {
some(orig_sp) => {
tcx.sess.span_err(sp, fmt!{"Duplicate field \
tcx.sess.span_err(sp, fmt!("Duplicate field \
name %s in record type declaration",
tcx.sess.str_of(id)});
tcx.sess.str_of(id)));
tcx.sess.span_note(orig_sp, ~"First declaration of \
this field occurred here");
break;
@ -528,7 +528,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) {
if (*tpt.bounds).is_not_empty() {
ccx.tcx.sess.span_err(
item.span,
fmt!{"foreign items may not have type parameters"});
fmt!("foreign items may not have type parameters"));
}
}
}
@ -561,8 +561,8 @@ impl @fn_ctxt: region_scope {
none if id == syntax::parse::token::special_idents::blk
=> result::ok(self.block_region()),
none => {
result::err(fmt!{"named region `%s` not in scope here",
self.ccx.tcx.sess.str_of(id)})
result::err(fmt!("named region `%s` not in scope here",
self.ccx.tcx.sess.str_of(id)))
}
}
}
@ -570,14 +570,14 @@ impl @fn_ctxt: region_scope {
}
impl @fn_ctxt {
fn tag() -> ~str { fmt!{"%x", ptr::addr_of(*self) as uint} }
fn tag() -> ~str { fmt!("%x", ptr::addr_of(*self) as uint) }
fn block_region() -> ty::region {
ty::re_scope(self.region_lb)
}
#[inline(always)]
fn write_ty(node_id: ast::node_id, ty: ty::t) {
debug!{"write_ty(%d, %s) in fcx %s",
node_id, ty_to_str(self.tcx(), ty), self.tag()};
debug!("write_ty(%d, %s) in fcx %s",
node_id, ty_to_str(self.tcx(), ty), self.tag());
self.node_types.insert(node_id, ty);
}
fn write_substs(node_id: ast::node_id, +substs: ty::substs) {
@ -607,9 +607,9 @@ impl @fn_ctxt {
some(t) => t,
none => {
self.tcx().sess.bug(
fmt!{"no type for expr %d (%s) in fcx %s",
fmt!("no type for expr %d (%s) in fcx %s",
ex.id, expr_to_str(ex, self.ccx.tcx.sess.intr()),
self.tag()});
self.tag()));
}
}
}
@ -618,11 +618,11 @@ impl @fn_ctxt {
some(t) => t,
none => {
self.tcx().sess.bug(
fmt!{"no type for node %d: %s in fcx %s",
fmt!("no type for node %d: %s in fcx %s",
id, ast_map::node_id_to_str(
self.tcx().items, id,
self.tcx().sess.parse_sess.interner),
self.tag()});
self.tag()));
}
}
}
@ -631,11 +631,11 @@ impl @fn_ctxt {
some(ts) => ts,
none => {
self.tcx().sess.bug(
fmt!{"no type substs for node %d: %s in fcx %s",
fmt!("no type substs for node %d: %s in fcx %s",
id, ast_map::node_id_to_str(
self.tcx().items, id,
self.tcx().sess.parse_sess.interner),
self.tag()});
self.tag()));
}
}
}
@ -647,10 +647,10 @@ impl @fn_ctxt {
err: &ty::type_err) {
self.ccx.tcx.sess.span_err(
sp,
fmt!{"mismatched types: expected `%s` but found `%s` (%s)",
fmt!("mismatched types: expected `%s` but found `%s` (%s)",
self.infcx.ty_to_str(e),
self.infcx.ty_to_str(a),
ty::type_err_to_str(self.ccx.tcx, err)});
ty::type_err_to_str(self.ccx.tcx, err)));
}
fn mk_subty(a_is_expected: bool, span: span,
@ -692,7 +692,7 @@ impl @fn_ctxt {
_ => {
self.ccx.tcx.sess.span_err(
sp,
fmt!{"%s requires unsafe function or block", op});
fmt!("%s requires unsafe function or block", op));
}
}
}
@ -863,10 +863,10 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
expected: option<ty::t>,
unifier: fn()) -> bool {
debug!{
debug!(
">> typechecking expr %d (%s)",
expr.id, syntax::print::pprust::expr_to_str(expr,
fcx.ccx.tcx.sess.intr())};
fcx.ccx.tcx.sess.intr()));
// A generic function to factor out common logic from call and
// overloaded operations
@ -880,8 +880,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
// Replace all region parameters in the arguments and return
// type with fresh region variables.
debug!{"check_call_inner: before universal quant., in_fty=%s",
fcx.infcx.ty_to_str(in_fty)};
debug!("check_call_inner: before universal quant., in_fty=%s",
fcx.infcx.ty_to_str(in_fty));
// This is subtle: we expect `fty` to be a function type, which
// normally introduce a level of binding. In this case, we want to
@ -907,8 +907,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
};
let fty = ty::mk_fn(fcx.tcx(), fn_ty);
debug!{"check_call_inner: after universal quant., fty=%s",
fcx.infcx.ty_to_str(fty)};
debug!("check_call_inner: after universal quant., fty=%s",
fcx.infcx.ty_to_str(fty));
let supplied_arg_count = vec::len(args);
@ -919,7 +919,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
fn_ty.inputs.map(|a| a.ty)
} else {
fcx.ccx.tcx.sess.span_err(
sp, fmt!{"this function takes %u parameter%s but %u \
sp, fmt!("this function takes %u parameter%s but %u \
parameter%s supplied", expected_arg_count,
if expected_arg_count == 1u {
~""
@ -931,7 +931,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
~" was"
} else {
~"s were"
}});
}));
fcx.infcx.next_ty_vars(supplied_arg_count)
};
@ -1099,8 +1099,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
// separate case below.
tcx.sess.span_bug(
expr.span,
fmt!{"comparison operator in expr_binop: %s",
ast_util::binop_to_str(op)});
fmt!("comparison operator in expr_binop: %s",
ast_util::binop_to_str(op)));
}
_ => lhs_t
};
@ -1161,8 +1161,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
some((ret_ty, _)) => ret_ty,
_ => {
fcx.ccx.tcx.sess.span_err(
ex.span, fmt!{"cannot apply unary operator `%s` to type `%s`",
op_str, fcx.infcx.ty_to_str(rhs_t)});
ex.span, fmt!("cannot apply unary operator `%s` to type `%s`",
op_str, fcx.infcx.ty_to_str(rhs_t)));
rhs_t
}
}
@ -1243,8 +1243,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
let fty = ty::mk_fn(tcx, fn_ty);
debug!{"check_expr_fn_with_unifier %s fty=%s",
expr_to_str(expr, tcx.sess.intr()), fcx.infcx.ty_to_str(fty)};
debug!("check_expr_fn_with_unifier %s fty=%s",
expr_to_str(expr, tcx.sess.intr()), fcx.infcx.ty_to_str(fty));
fcx.write_ty(expr.id, fty);
@ -1285,7 +1285,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
// (1) verify that the class id actually has a field called
// field
debug!{"class named %s", ty_to_str(tcx, base_t)};
debug!("class named %s", ty_to_str(tcx, base_t));
/*
check whether this is a self-reference or not, which
determines whether we look at all fields or only public
@ -1335,11 +1335,11 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
}
none => {
let t_err = fcx.infcx.resolve_type_vars_if_possible(expr_t);
let msg = fmt!{"attempted access of field `%s` on type `%s`, \
let msg = fmt!("attempted access of field `%s` on type `%s`, \
but no public field or method with that name \
was found",
tcx.sess.str_of(field),
fcx.infcx.ty_to_str(t_err)};
fcx.infcx.ty_to_str(t_err));
tcx.sess.span_err(expr.span, msg);
// NB: Adding a bogus type to allow typechecking to continue
fcx.write_ty(expr.id, fcx.infcx.next_ty_var());
@ -1466,8 +1466,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
_ => {
tcx.sess.span_err(
expr.span,
fmt!{"type %s cannot be dereferenced",
fcx.infcx.ty_to_str(oprnd_t)});
fmt!("type %s cannot be dereferenced",
fcx.infcx.ty_to_str(oprnd_t)));
}
}
}
@ -1626,9 +1626,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
result::ok(_) => (),
result::err(err) => {
tcx.sess.span_fatal(
expr.span, fmt!{"a `loop` function's last argument \
expr.span, fmt!("a `loop` function's last argument \
should return `bool`, not `%s`",
fcx.infcx.ty_to_str(fty.output)});
fcx.infcx.ty_to_str(fty.output)));
}
}
(ty::mk_fn(tcx, {output: ty::mk_nil(tcx) with fty}), fty.proto)
@ -1702,8 +1702,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
let t_1 = fcx.to_ty(t);
let t_e = fcx.expr_ty(e);
debug!{"t_1=%s", fcx.infcx.ty_to_str(t_1)};
debug!{"t_e=%s", fcx.infcx.ty_to_str(t_e)};
debug!("t_1=%s", fcx.infcx.ty_to_str(t_1));
debug!("t_e=%s", fcx.infcx.ty_to_str(t_e));
match ty::get(t_1).struct {
// This will be looked up later on
@ -1902,14 +1902,14 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
none => {
tcx.sess.span_err(
field.span,
fmt!{"structure has no field named field named `%s`",
tcx.sess.str_of(field.node.ident)});
fmt!("structure has no field named field named `%s`",
tcx.sess.str_of(field.node.ident)));
}
some((_, true)) => {
tcx.sess.span_err(
field.span,
fmt!{"field `%s` specified more than once",
tcx.sess.str_of(field.node.ident)});
fmt!("field `%s` specified more than once",
tcx.sess.str_of(field.node.ident)));
}
some((field_id, false)) => {
let expected_field_type =
@ -1939,14 +1939,14 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
}
tcx.sess.span_err(expr.span,
fmt!{"missing field%s: %s",
fmt!("missing field%s: %s",
if missing_fields.len() == 1 {
~""
} else {
~"s"
},
str::connect(missing_fields,
~", ")});
~", ")));
}
}
some(base_expr) => {
@ -1992,17 +1992,17 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
}
if bot { fcx.write_bot(expr.id); }
debug!{"type of expr %s is %s, expected is %s",
debug!("type of expr %s is %s, expected is %s",
syntax::print::pprust::expr_to_str(expr, tcx.sess.intr()),
ty_to_str(tcx, fcx.expr_ty(expr)),
match expected {
some(t) => ty_to_str(tcx, t),
_ => ~"empty"
}};
});
unifier();
debug!{"<< bot=%b", bot};
debug!("<< bot=%b", bot);
return bot;
}
@ -2152,10 +2152,10 @@ fn check_instantiable(tcx: ty::ctxt,
item_id: ast::node_id) {
let item_ty = ty::node_id_to_type(tcx, item_id);
if !ty::is_instantiable(tcx, item_ty) {
tcx.sess.span_err(sp, fmt!{"this type cannot be instantiated \
tcx.sess.span_err(sp, fmt!("this type cannot be instantiated \
without an instance of itself; \
consider using `option<%s>`",
ty_to_str(tcx, item_ty)});
ty_to_str(tcx, item_ty)));
}
}
@ -2439,7 +2439,7 @@ fn ast_expr_vstore_to_vstore(fcx: @fn_ctxt, e: @ast::expr, n: uint,
ast::vstore_fixed(none) => ty::vstore_fixed(n),
ast::vstore_fixed(some(u)) => {
if n != u {
let s = fmt!{"fixed-size sequence mismatch: %u vs. %u",u, n};
let s = fmt!("fixed-size sequence mismatch: %u vs. %u",u, n);
fcx.ccx.tcx.sess.span_err(e.span,s);
}
ty::vstore_fixed(u)
@ -2475,8 +2475,8 @@ fn check_bounds_are_used(ccx: @crate_ctxt,
for tps_used.eachi |i, b| {
if !b {
ccx.tcx.sess.span_err(
span, fmt!{"type parameter `%s` is unused",
ccx.tcx.sess.str_of(tps[i].ident)});
span, fmt!("type parameter `%s` is unused",
ccx.tcx.sess.str_of(tps[i].ident)));
}
}
}
@ -2574,14 +2574,14 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::foreign_item) {
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = (*i_ty.bounds).len();
if i_n_tps != n_tps {
tcx.sess.span_err(it.span, fmt!{"intrinsic has wrong number \
tcx.sess.span_err(it.span, fmt!("intrinsic has wrong number \
of type parameters: found %u, \
expected %u", i_n_tps, n_tps});
expected %u", i_n_tps, n_tps));
} else {
require_same_types(
tcx, none, false, it.span, i_ty.ty, fty,
|| fmt!{"intrinsic has wrong type: \
|| fmt!("intrinsic has wrong type: \
expected `%s`",
ty_to_str(ccx.tcx, fty)});
ty_to_str(ccx.tcx, fty)));
}
}

View file

@ -101,12 +101,12 @@ fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path,
if arg_len > 0u {
// N-ary variant.
if arg_len != subpats_len {
let s = fmt!{"this pattern has %u field%s, but the \
let s = fmt!("this pattern has %u field%s, but the \
corresponding variant has %u field%s",
subpats_len,
if subpats_len == 1u { ~"" } else { ~"s" },
arg_len,
if arg_len == 1u { ~"" } else { ~"s" }};
if arg_len == 1u { ~"" } else { ~"s" });
tcx.sess.span_fatal(pat.span, s);
}
@ -117,18 +117,18 @@ fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path,
};
} else if subpats_len > 0u {
tcx.sess.span_fatal
(pat.span, fmt!{"this pattern has %u field%s, \
(pat.span, fmt!("this pattern has %u field%s, \
but the corresponding variant has no fields",
subpats_len,
if subpats_len == 1u { ~"" }
else { ~"s" }});
else { ~"s" }));
}
}
_ => {
tcx.sess.span_fatal
(pat.span,
fmt!{"mismatched types: expected enum but found `%s`",
fcx.infcx.ty_to_str(expected)});
fmt!("mismatched types: expected enum but found `%s`",
fcx.infcx.ty_to_str(expected)));
}
}
}
@ -154,8 +154,8 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
fcx.infcx.resolve_type_vars_if_possible(fcx.expr_ty(begin));
let e_ty =
fcx.infcx.resolve_type_vars_if_possible(fcx.expr_ty(end));
debug!{"pat_range beginning type: %?", b_ty};
debug!{"pat_range ending type: %?", e_ty};
debug!("pat_range beginning type: %?", b_ty);
debug!("pat_range ending type: %?", e_ty);
if !require_same_types(
tcx, some(fcx.infcx), false, pat.span, b_ty, e_ty,
|| ~"mismatched types in range") {
@ -240,18 +240,18 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
_ => {
tcx.sess.span_fatal
(pat.span,
fmt!{"mismatched types: expected `%s` but found record",
fcx.infcx.ty_to_str(expected)});
fmt!("mismatched types: expected `%s` but found record",
fcx.infcx.ty_to_str(expected)));
}
};
let f_count = vec::len(fields);
let ex_f_count = vec::len(ex_fields);
if ex_f_count < f_count || !etc && ex_f_count > f_count {
tcx.sess.span_fatal
(pat.span, fmt!{"mismatched types: expected a record \
(pat.span, fmt!("mismatched types: expected a record \
with %u fields, found one with %u \
fields",
ex_f_count, f_count});
ex_f_count, f_count));
}
for fields.each |f| {
@ -261,9 +261,9 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
}
none => {
tcx.sess.span_fatal(pat.span,
fmt!{"mismatched types: did not \
fmt!("mismatched types: did not \
expect a record with a field `%s`",
tcx.sess.str_of(f.ident)});
tcx.sess.str_of(f.ident)));
}
}
}
@ -363,16 +363,16 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
_ => {
tcx.sess.span_fatal
(pat.span,
fmt!{"mismatched types: expected `%s`, found tuple",
fcx.infcx.ty_to_str(expected)});
fmt!("mismatched types: expected `%s`, found tuple",
fcx.infcx.ty_to_str(expected)));
}
};
let e_count = vec::len(elts);
if e_count != vec::len(ex_elts) {
tcx.sess.span_fatal
(pat.span, fmt!{"mismatched types: expected a tuple \
(pat.span, fmt!("mismatched types: expected a tuple \
with %u fields, found one with %u \
fields", vec::len(ex_elts), e_count});
fields", vec::len(ex_elts), e_count));
}
let mut i = 0u;
for elts.each |elt| {

View file

@ -106,10 +106,10 @@ struct lookup {
// Entrypoint:
fn method() -> option<method_map_entry> {
debug!{"method lookup(m_name=%s, self_ty=%s, %?)",
debug!("method lookup(m_name=%s, self_ty=%s, %?)",
self.fcx.tcx().sess.str_of(self.m_name),
self.fcx.infcx.ty_to_str(self.self_ty),
ty::get(self.self_ty).struct};
ty::get(self.self_ty).struct);
// Determine if there are any inherent methods we can call.
// (An inherent method is one that belongs to no trait, but is
@ -122,15 +122,15 @@ struct lookup {
optional_inherent_methods = none;
}
some(base_type_def_id) => {
debug!{"(checking method) found base type"};
debug!("(checking method) found base type");
optional_inherent_methods =
self.fcx.ccx.coherence_info.inherent_methods.find
(base_type_def_id);
if optional_inherent_methods.is_none() {
debug!{"(checking method) ... no inherent methods found"};
debug!("(checking method) ... no inherent methods found");
} else {
debug!{"(checking method) ... inherent methods found"};
debug!("(checking method) ... inherent methods found");
}
}
}
@ -177,8 +177,8 @@ struct lookup {
// if we found anything, stop before trying borrows
if self.candidates.len() > 0u {
debug!{"(checking method) found at least one inherent \
method; giving up looking now"};
debug!("(checking method) found at least one inherent \
method; giving up looking now");
break;
}
@ -205,8 +205,8 @@ struct lookup {
// if we found anything, stop before attempting auto-deref.
if self.candidates.len() > 0u {
debug!{"(checking method) found at least one inherent \
method; giving up looking now"};
debug!("(checking method) found at least one inherent \
method; giving up looking now");
break;
}
@ -221,8 +221,8 @@ struct lookup {
}
if self.candidates.len() == 0u {
debug!{"(checking method) couldn't find any candidate methods; \
returning none"};
debug!("(checking method) couldn't find any candidate methods; \
returning none");
return none;
}
@ -261,30 +261,30 @@ struct lookup {
};
self.tcx().sess.span_note(
span,
fmt!{"candidate #%u is `%s`",
fmt!("candidate #%u is `%s`",
(idx+1u),
ty::item_path_str(self.tcx(), did)});
ty::item_path_str(self.tcx(), did)));
}
fn report_param_candidate(idx: uint, did: ast::def_id) {
self.tcx().sess.span_note(
self.expr.span,
fmt!{"candidate #%u derives from the bound `%s`",
fmt!("candidate #%u derives from the bound `%s`",
(idx+1u),
ty::item_path_str(self.tcx(), did)});
ty::item_path_str(self.tcx(), did)));
}
fn report_trait_candidate(idx: uint, did: ast::def_id) {
self.tcx().sess.span_note(
self.expr.span,
fmt!{"candidate #%u derives from the type of the receiver, \
fmt!("candidate #%u derives from the type of the receiver, \
which is the trait `%s`",
(idx+1u),
ty::item_path_str(self.tcx(), did)});
ty::item_path_str(self.tcx(), did)));
}
fn add_candidates_from_param(n: uint, did: ast::def_id) {
debug!{"add_candidates_from_param"};
debug!("add_candidates_from_param");
let tcx = self.tcx();
let mut trait_bnd_idx = 0u; // count only trait bounds
@ -334,7 +334,7 @@ struct lookup {
fn add_candidates_from_trait(did: ast::def_id, trait_substs: ty::substs) {
debug!{"add_candidates_from_trait"};
debug!("add_candidates_from_trait");
let ms = *ty::trait_methods(self.tcx(), did);
for ms.eachi |i, m| {
@ -369,7 +369,7 @@ struct lookup {
fn add_candidates_from_class(did: ast::def_id, class_substs: ty::substs) {
debug!{"add_candidates_from_class"};
debug!("add_candidates_from_class");
let ms = *ty::trait_methods(self.tcx(), did);
@ -469,7 +469,7 @@ struct lookup {
matches = self.fcx.can_mk_subty(ref_ty, impl_ty);
}
}
debug!{"matches = %?", matches};
debug!("matches = %?", matches);
match matches {
result::err(_) => { /* keep looking */ }
result::ok(_) => {
@ -528,15 +528,15 @@ struct lookup {
// Continue.
}
some(inherent_methods) => {
debug!{"(adding inherent and extension candidates) adding \
inherent candidates"};
debug!("(adding inherent and extension candidates) adding \
inherent candidates");
for inherent_methods.each |implementation| {
debug!{"(adding inherent and extension candidates) \
debug!("(adding inherent and extension candidates) \
adding candidates from impl: %s",
node_id_to_str(self.tcx().items,
implementation.did.node,
self.fcx.tcx().sess.parse_sess
.interner)};
.interner));
self.add_candidates_from_impl(implementation, mode);
}
}
@ -549,9 +549,9 @@ struct lookup {
}
some(trait_ids) => {
for (*trait_ids).each |trait_id| {
debug!{"(adding inherent and extension candidates) \
debug!("(adding inherent and extension candidates) \
trying trait: %s",
self.def_id_to_str(trait_id)};
self.def_id_to_str(trait_id));
let coherence_info = self.fcx.ccx.coherence_info;
match coherence_info.extension_methods.find(trait_id) {
@ -560,10 +560,10 @@ struct lookup {
}
some(extension_methods) => {
for extension_methods.each |implementation| {
debug!{"(adding inherent and extension \
debug!("(adding inherent and extension \
candidates) adding impl %s",
self.def_id_to_str
(implementation.did)};
(implementation.did));
self.add_candidates_from_impl(implementation, mode);
}
}
@ -586,10 +586,10 @@ struct lookup {
fn write_mty_from_candidate(cand: candidate) -> method_map_entry {
let tcx = self.fcx.ccx.tcx;
debug!{"write_mty_from_candidate(n_tps_m=%u, fty=%s, entry=%?)",
debug!("write_mty_from_candidate(n_tps_m=%u, fty=%s, entry=%?)",
cand.n_tps_m,
self.fcx.infcx.ty_to_str(cand.fty),
cand.entry};
cand.entry);
match cand.mode {
subtyping_mode | assignability_mode => {
@ -602,9 +602,9 @@ struct lookup {
result::err(_) => {
self.tcx().sess.span_bug(
self.expr.span,
fmt!{"%s was assignable to %s but now is not?",
fmt!("%s was assignable to %s but now is not?",
self.fcx.infcx.ty_to_str(cand.self_ty),
self.fcx.infcx.ty_to_str(cand.rcvr_ty)});
self.fcx.infcx.ty_to_str(cand.rcvr_ty)));
}
}
}

View file

@ -156,8 +156,8 @@ fn visit_block(b: ast::blk, &&rcx: @rcx, v: rvt) {
}
fn visit_expr(e: @ast::expr, &&rcx: @rcx, v: rvt) {
debug!{"visit_expr(e=%s)",
pprust::expr_to_str(e, rcx.fcx.tcx().sess.intr())};
debug!("visit_expr(e=%s)",
pprust::expr_to_str(e, rcx.fcx.tcx().sess.intr()));
match e.node {
ast::expr_path(*) => {
@ -257,8 +257,8 @@ fn visit_node(id: ast::node_id, span: span, rcx: @rcx) -> bool {
let tcx = fcx.ccx.tcx;
let encl_region = ty::encl_region(tcx, id);
debug!{"visit_node(ty=%s, id=%d, encl_region=%?)",
ty_to_str(tcx, ty), id, encl_region};
debug!("visit_node(ty=%s, id=%d, encl_region=%?)",
ty_to_str(tcx, ty), id, encl_region);
// Otherwise, look at the type and see if it is a region pointer.
return constrain_regions_in_type(rcx, encl_region, span, ty);
@ -316,8 +316,8 @@ fn constrain_regions_in_type(
region: ty::region) {
let tcx = rcx.fcx.ccx.tcx;
debug!{"constrain_region(encl_region=%?, region=%?)",
encl_region, region};
debug!("constrain_region(encl_region=%?, region=%?)",
encl_region, region);
match region {
ty::re_bound(_) => {

View file

@ -35,15 +35,15 @@ fn replace_bound_regions_in_fn_ty(
for self_ty.each |t| { vec::push(all_tys, t) }
debug!{"replace_bound_regions_in_fn_ty(self_info.self_ty=%?, fn_ty=%s, \
debug!("replace_bound_regions_in_fn_ty(self_info.self_ty=%?, fn_ty=%s, \
all_tys=%?)",
self_ty.map(|t| ty_to_str(tcx, t)),
ty_to_str(tcx, ty::mk_fn(tcx, *fn_ty)),
all_tys.map(|t| ty_to_str(tcx, t))};
all_tys.map(|t| ty_to_str(tcx, t)));
let _i = indenter();
let isr = do create_bound_region_mapping(tcx, isr, all_tys) |br| {
debug!{"br=%?", br};
debug!("br=%?", br);
mapf(br)
};
let ty_fn = ty::ty_fn(*fn_ty);
@ -52,10 +52,10 @@ fn replace_bound_regions_in_fn_ty(
});
let t_self = self_ty.map(|t| replace_bound_regions(tcx, isr, t));
debug!{"result of replace_bound_regions_in_fn_ty: self_info.self_ty=%?, \
debug!("result of replace_bound_regions_in_fn_ty: self_info.self_ty=%?, \
fn_ty=%s",
t_self.map(|t| ty_to_str(tcx, t)),
ty_to_str(tcx, t_fn)};
ty_to_str(tcx, t_fn));
// Glue updated self_ty back together with its original def_id.
@ -161,9 +161,9 @@ fn replace_bound_regions_in_fn_ty(
none if in_fn => r,
none => {
tcx.sess.bug(
fmt!{"Bound region not found in \
fmt!("Bound region not found in \
in_scope_regions list: %s",
region_to_str(tcx, r)});
region_to_str(tcx, r)));
}
}
}

View file

@ -83,8 +83,8 @@ fn lookup_vtable(fcx: @fn_ctxt,
-> vtable_origin
{
debug!{"lookup_vtable(ty=%s, trait_ty=%s)",
fcx.infcx.ty_to_str(ty), fcx.infcx.ty_to_str(trait_ty)};
debug!("lookup_vtable(ty=%s, trait_ty=%s)",
fcx.infcx.ty_to_str(ty), fcx.infcx.ty_to_str(trait_ty));
let _i = indenter();
let tcx = fcx.ccx.tcx;
@ -116,8 +116,8 @@ fn lookup_vtable(fcx: @fn_ctxt,
match check ty::get(ity).struct {
ty::ty_trait(idid, substs, _) => {
if trait_id == idid {
debug!{"(checking vtable) @0 relating ty to trait ty
with did %?", idid};
debug!("(checking vtable) @0 relating ty to trait ty
with did %?", idid);
relate_trait_tys(fcx, expr, trait_ty, ity);
return vtable_param(n, n_bound);
}
@ -130,8 +130,8 @@ fn lookup_vtable(fcx: @fn_ctxt,
}
ty::ty_trait(did, substs, _) if trait_id == did => {
debug!{"(checking vtable) @1 relating ty to trait ty with did %?",
did};
debug!("(checking vtable) @1 relating ty to trait ty with did %?",
did);
relate_trait_tys(fcx, expr, trait_ty, ty);
if !allow_unsafe && !is_early {
@ -258,9 +258,9 @@ fn fixup_ty(fcx: @fn_ctxt,
result::err(e) if !is_early => {
tcx.sess.span_fatal(
expr.span,
fmt!{"cannot determine a type \
fmt!("cannot determine a type \
for this bounded type parameter: %s",
fixup_err_to_str(e)})
fixup_err_to_str(e)))
}
result::err(e) => {
none
@ -275,8 +275,8 @@ fn connect_trait_tps(fcx: @fn_ctxt, expr: @ast::expr, impl_tys: ~[ty::t],
// XXX: This should work for multiple traits.
let ity = ty::impl_traits(tcx, impl_did)[0];
let trait_ty = ty::subst_tps(tcx, impl_tys, ity);
debug!{"(connect trait tps) trait type is %?, impl did is %?",
ty::get(trait_ty).struct, impl_did};
debug!("(connect trait tps) trait type is %?, impl did is %?",
ty::get(trait_ty).struct, impl_did);
match check ty::get(trait_ty).struct {
ty::ty_trait(_, substs, _) => {
vec::iter2(substs.tps, trait_tys,

View file

@ -16,9 +16,9 @@ fn resolve_type_vars_in_type(fcx: @fn_ctxt, sp: span, typ: ty::t) ->
if !fcx.ccx.tcx.sess.has_errors() {
fcx.ccx.tcx.sess.span_err(
sp,
fmt!{"cannot determine a type \
fmt!("cannot determine a type \
for this expression: %s",
infer::fixup_err_to_str(e)})
infer::fixup_err_to_str(e)))
}
return none;
}
@ -35,8 +35,8 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
}
some(t) => {
debug!{"resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)",
id, ty_to_str(tcx, n_ty), ty_to_str(tcx, t)};
debug!("resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)",
id, ty_to_str(tcx, n_ty), ty_to_str(tcx, t));
write_ty_to_tcx(tcx, id, t);
match fcx.opt_node_ty_substs(id) {
some(substs) => {
@ -116,11 +116,11 @@ fn visit_block(b: ast::blk, wbcx: wb_ctxt, v: wb_vt) {
fn visit_pat(p: @ast::pat, wbcx: wb_ctxt, v: wb_vt) {
if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, p.span, p.id);
debug!{"Type for pattern binding %s (id %d) resolved to %s",
debug!("Type for pattern binding %s (id %d) resolved to %s",
pat_to_str(p, wbcx.fcx.ccx.tcx.sess.intr()), p.id,
wbcx.fcx.infcx.ty_to_str(
ty::node_id_to_type(wbcx.fcx.ccx.tcx,
p.id))};
p.id)));
visit::visit_pat(p, wbcx, v);
}
fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) {
@ -129,17 +129,17 @@ fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) {
let var_ty = ty::mk_var(wbcx.fcx.tcx(), var_id);
match resolve_type(wbcx.fcx.infcx, var_ty, resolve_all | force_all) {
result::ok(lty) => {
debug!{"Type for local %s (id %d) resolved to %s",
debug!("Type for local %s (id %d) resolved to %s",
pat_to_str(l.node.pat, wbcx.fcx.ccx.tcx.sess.intr()),l.node.id,
wbcx.fcx.infcx.ty_to_str(lty)};
wbcx.fcx.infcx.ty_to_str(lty));
write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.node.id, lty);
}
result::err(e) => {
wbcx.fcx.ccx.tcx.sess.span_err(
l.span,
fmt!{"cannot determine a type \
fmt!("cannot determine a type \
for this local variable: %s",
infer::fixup_err_to_str(e)});
infer::fixup_err_to_str(e)));
wbcx.success = false;
}
}

View file

@ -60,13 +60,13 @@ fn get_base_type(inference_context: infer_ctxt, span: span, original_type: t)
ty_uniq(base_mutability_and_type) |
ty_ptr(base_mutability_and_type) |
ty_rptr(_, base_mutability_and_type) => {
debug!{"(getting base type) recurring"};
debug!("(getting base type) recurring");
get_base_type(inference_context, span,
base_mutability_and_type.ty)
}
ty_enum(*) | ty_trait(*) | ty_class(*) => {
debug!{"(getting base type) found base type"};
debug!("(getting base type) found base type");
some(resolved_type)
}
@ -75,8 +75,8 @@ fn get_base_type(inference_context: infer_ctxt, span: span, original_type: t)
ty_fn(*) | ty_tup(*) | ty_var(*) | ty_var_integral(*) |
ty_param(*) | ty_self | ty_type | ty_opaque_box |
ty_opaque_closure_ptr(*) | ty_unboxed_vec(*) => {
debug!{"(getting base type) no base type; found %?",
get(original_type).struct};
debug!("(getting base type) no base type; found %?",
get(original_type).struct);
none
}
}
@ -173,9 +173,9 @@ struct CoherenceChecker {
match item.node {
item_trait(_, _, trait_methods) => {
for trait_methods.each |trait_method| {
debug!{"(building provided methods map) checking \
debug!("(building provided methods map) checking \
trait `%s` with id %d",
sess.str_of(item.ident), item.id};
sess.str_of(item.ident), item.id);
match trait_method {
required(_) => { /* fall through */}
@ -191,11 +191,11 @@ struct CoherenceChecker {
// provided_methods_map, we just
// need to add this method to
// that entry.
debug!{"(building provided \
debug!("(building provided \
methods map) adding \
method `%s` to entry for \
existing trait",
sess.str_of(mi.ident)};
sess.str_of(mi.ident));
let mut method_infos = mis;
push(method_infos, mi);
pmm.insert(item.id, method_infos);
@ -203,10 +203,10 @@ struct CoherenceChecker {
none => {
// If the trait doesn't have an
// entry yet, create one.
debug!{"(building provided \
debug!("(building provided \
methods map) creating new \
entry for method `%s`",
sess.str_of(mi.ident)};
sess.str_of(mi.ident));
pmm.insert(item.id, ~[mi]);
}
}
@ -229,8 +229,8 @@ struct CoherenceChecker {
// inherent methods and extension methods.
visit_crate(*crate, (), mk_simple_visitor(@{
visit_item: |item| {
debug!{"(checking coherence) item '%s'",
self.crate_context.tcx.sess.str_of(item.ident)};
debug!("(checking coherence) item '%s'",
self.crate_context.tcx.sess.str_of(item.ident));
match item.node {
item_impl(_, associated_traits, _, _) => {
@ -270,9 +270,9 @@ struct CoherenceChecker {
// base type.
if associated_traits.len() == 0 {
debug!{"(checking implementation) no associated traits for item \
debug!("(checking implementation) no associated traits for item \
'%s'",
self.crate_context.tcx.sess.str_of(item.ident)};
self.crate_context.tcx.sess.str_of(item.ident));
match get_base_type_def_id(self.inference_context,
item.span,
@ -293,12 +293,12 @@ struct CoherenceChecker {
for associated_traits.each |associated_trait| {
let trait_did =
self.trait_ref_to_trait_def_id(associated_trait);
debug!{"(checking implementation) adding impl for trait \
debug!("(checking implementation) adding impl for trait \
'%s', item '%s'",
ast_map::node_id_to_str(
self.crate_context.tcx.items, trait_did.node,
self.crate_context.tcx.sess.parse_sess.interner),
self.crate_context.tcx.sess.str_of(item.ident)};
self.crate_context.tcx.sess.str_of(item.ident));
let implementation = self.create_impl_from_item(item);
self.add_trait_method(trait_did, implementation);
@ -442,10 +442,10 @@ struct CoherenceChecker {
let privileged_types =
self.gather_privileged_types(module_.items);
for privileged_types.each |privileged_type| {
debug!{"(checking privileged scopes) entering \
debug!("(checking privileged scopes) entering \
privileged scope of %d:%d",
privileged_type.crate,
privileged_type.node};
privileged_type.node);
self.privileged_types.insert(privileged_type, ());
}
@ -586,9 +586,9 @@ struct CoherenceChecker {
}
if !method_inherent_to_impl {
debug!{
debug!(
"(creating impl) adding provided method `%s` to impl",
sess.str_of(provided_method.ident)};
sess.str_of(provided_method.ident));
push(methods, provided_method);
}
}
@ -616,14 +616,14 @@ struct CoherenceChecker {
match self.crate_context.provided_methods_map
.find(trait_did.node) {
none => {
debug!{"(creating impl) trait with node_id `%d` \
has no provided methods", trait_did.node};
debug!("(creating impl) trait with node_id `%d` \
has no provided methods", trait_did.node);
/* fall through */
}
some(all_provided)
=> {
debug!{"(creating impl) trait with node_id `%d` \
has provided methods", trait_did.node};
debug!("(creating impl) trait with node_id `%d` \
has provided methods", trait_did.node);
// Selectively add only those provided
// methods that aren't inherent to the
// trait.
@ -722,11 +722,11 @@ struct CoherenceChecker {
self_type.ty) {
none => {
let session = self.crate_context.tcx.sess;
session.bug(fmt!{
session.bug(fmt!(
"no base type for external impl \
with no trait: %s (type %s)!",
session.str_of(implementation.ident),
ty_to_str(self.crate_context.tcx,self_type.ty)});
ty_to_str(self.crate_context.tcx,self_type.ty)));
}
some(_) => {
// Nothing to do.

View file

@ -95,8 +95,8 @@ impl @crate_ctxt: ast_conv {
ty_of_foreign_item(self, foreign_item)
}
x => {
self.tcx.sess.bug(fmt!{"unexpected sort of item \
in get_item_ty(): %?", x});
self.tcx.sess.bug(fmt!("unexpected sort of item \
in get_item_ty(): %?", x));
}
}
}
@ -249,17 +249,17 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span,
if impl_m.purity != trait_m.purity {
tcx.sess.span_err(
sp, fmt!{"method `%s`'s purity does \
sp, fmt!("method `%s`'s purity does \
not match the trait method's \
purity", tcx.sess.str_of(impl_m.ident)});
purity", tcx.sess.str_of(impl_m.ident)));
}
// is this check right?
if impl_m.self_ty != trait_m.self_ty {
tcx.sess.span_err(
sp, fmt!{"method `%s`'s self type does \
sp, fmt!("method `%s`'s self type does \
not match the trait method's \
self type", tcx.sess.str_of(impl_m.ident)});
self type", tcx.sess.str_of(impl_m.ident)));
}
if impl_m.tps != trait_m.tps {
@ -269,11 +269,11 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span,
}
if vec::len(impl_m.fty.inputs) != vec::len(trait_m.fty.inputs) {
tcx.sess.span_err(sp,fmt!{"method `%s` has %u parameters \
tcx.sess.span_err(sp,fmt!("method `%s` has %u parameters \
but the trait has %u",
tcx.sess.str_of(trait_m.ident),
vec::len(impl_m.fty.inputs),
vec::len(trait_m.fty.inputs)});
vec::len(trait_m.fty.inputs)));
return;
}
@ -357,8 +357,8 @@ fn check_methods_against_trait(ccx: @crate_ctxt,
none => {
tcx.sess.span_err(
a_trait_ty.path.span,
fmt!{"missing method `%s`",
tcx.sess.str_of(trait_m.ident)});
fmt!("missing method `%s`",
tcx.sess.str_of(trait_m.ident)));
}
}
}
@ -413,8 +413,8 @@ fn convert_methods(ccx: @crate_ctxt,
fn convert(ccx: @crate_ctxt, it: @ast::item) {
let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(it.id);
#debug["convert: item %s with id %d rp %?",
tcx.sess.str_of(it.ident), it.id, rp];
debug!("convert: item %s with id %d rp %?",
tcx.sess.str_of(it.ident), it.id, rp);
match it.node {
// These don't define types.
ast::item_foreign_mod(_) | ast::item_mod(_) => {}
@ -440,8 +440,8 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) {
}
ast::item_trait(tps, _, trait_methods) => {
let tpt = ty_of_item(ccx, it);
debug!{"item_trait(it.id=%d, tpt.ty=%s)",
it.id, ty_to_str(tcx, tpt.ty)};
debug!("item_trait(it.id=%d, tpt.ty=%s)",
it.id, ty_to_str(tcx, tpt.ty));
write_ty_to_tcx(tcx, it.id, tpt.ty);
ensure_trait_methods(ccx, it.id, tpt.ty);
@ -621,8 +621,8 @@ fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
let tpt = {bounds: bounds,
region_param: none,
ty: ty::mk_fn(ccx.tcx, tofd)};
debug!{"type of %s (id %d) is %s",
tcx.sess.str_of(it.ident), it.id, ty_to_str(tcx, tpt.ty)};
debug!("type of %s (id %d) is %s",
tcx.sess.str_of(it.ident), it.id, ty_to_str(tcx, tpt.ty));
ccx.tcx.tcache.insert(local_def(it.id), tpt);
return tpt;
}

View file

@ -352,8 +352,8 @@ fn fixup_err_to_str(f: fixup_err) -> ~str {
cyclic_ty(_) => ~"cyclic type of infinite size",
unresolved_region(_) => ~"unconstrained region",
region_var_bound_by_region_var(r1, r2) => {
fmt!{"region var %? bound by another region var %?; this is \
a bug in rustc", r1, r2}
fmt!("region var %? bound by another region var %?; this is \
a bug in rustc", r1, r2)
}
}
}
@ -384,7 +384,7 @@ fn mk_sub(cx: infer_ctxt, a_is_expected: bool, span: span) -> Sub {
fn mk_subty(cx: infer_ctxt, a_is_expected: bool, span: span,
a: ty::t, b: ty::t) -> ures {
debug!{"mk_subty(%s <: %s)", a.to_str(cx), b.to_str(cx)};
debug!("mk_subty(%s <: %s)", a.to_str(cx), b.to_str(cx));
do indent {
do cx.commit {
mk_sub(cx, a_is_expected, span).tys(a, b)
@ -393,7 +393,7 @@ fn mk_subty(cx: infer_ctxt, a_is_expected: bool, span: span,
}
fn can_mk_subty(cx: infer_ctxt, a: ty::t, b: ty::t) -> ures {
debug!{"can_mk_subty(%s <: %s)", a.to_str(cx), b.to_str(cx)};
debug!("can_mk_subty(%s <: %s)", a.to_str(cx), b.to_str(cx));
do indent {
do cx.probe {
mk_sub(cx, true, ast_util::dummy_sp()).tys(a, b)
@ -403,7 +403,7 @@ fn can_mk_subty(cx: infer_ctxt, a: ty::t, b: ty::t) -> ures {
fn mk_subr(cx: infer_ctxt, a_is_expected: bool, span: span,
a: ty::region, b: ty::region) -> ures {
debug!{"mk_subr(%s <: %s)", a.to_str(cx), b.to_str(cx)};
debug!("mk_subr(%s <: %s)", a.to_str(cx), b.to_str(cx));
do indent {
do cx.commit {
mk_sub(cx, a_is_expected, span).regions(a, b)
@ -413,7 +413,7 @@ fn mk_subr(cx: infer_ctxt, a_is_expected: bool, span: span,
fn mk_eqty(cx: infer_ctxt, a_is_expected: bool, span: span,
a: ty::t, b: ty::t) -> ures {
debug!{"mk_eqty(%s <: %s)", a.to_str(cx), b.to_str(cx)};
debug!("mk_eqty(%s <: %s)", a.to_str(cx), b.to_str(cx));
do indent {
do cx.commit {
let suber = mk_sub(cx, a_is_expected, span);
@ -424,8 +424,8 @@ fn mk_eqty(cx: infer_ctxt, a_is_expected: bool, span: span,
fn mk_assignty(cx: infer_ctxt, anmnt: &assignment,
a: ty::t, b: ty::t) -> ures {
debug!{"mk_assignty(%? / %s <: %s)",
anmnt, a.to_str(cx), b.to_str(cx)};
debug!("mk_assignty(%? / %s <: %s)",
anmnt, a.to_str(cx), b.to_str(cx));
do indent {
do cx.commit {
cx.assign_tys(anmnt, a, b)
@ -435,8 +435,8 @@ fn mk_assignty(cx: infer_ctxt, anmnt: &assignment,
fn can_mk_assignty(cx: infer_ctxt, anmnt: &assignment,
a: ty::t, b: ty::t) -> ures {
debug!{"can_mk_assignty(%? / %s <: %s)",
anmnt, a.to_str(cx), b.to_str(cx)};
debug!("can_mk_assignty(%? / %s <: %s)",
anmnt, a.to_str(cx), b.to_str(cx));
// FIXME(#2593)---this will not unroll any entries we make in the
// borrowings table. But this is OK for the moment because this
@ -464,8 +464,8 @@ fn resolve_borrowings(cx: infer_ctxt) {
for cx.borrowings.each |item| {
match resolve_region(cx, item.scope, resolve_all|force_all) {
ok(region) => {
debug!{"borrowing for expr %d resolved to region %?, mutbl %?",
item.expr_id, region, item.mutbl};
debug!("borrowing for expr %d resolved to region %?, mutbl %?",
item.expr_id, region, item.mutbl);
cx.tcx.borrowings.insert(
item.expr_id, {region: region, mutbl: item.mutbl});
}
@ -474,7 +474,7 @@ fn resolve_borrowings(cx: infer_ctxt) {
let str = fixup_err_to_str(e);
cx.tcx.sess.span_err(
item.span,
fmt!{"could not resolve lifetime for borrow: %s", str});
fmt!("could not resolve lifetime for borrow: %s", str));
}
}
}
@ -573,7 +573,7 @@ impl infer_ctxt {
fn commit<T,E>(f: fn() -> result<T,E>) -> result<T,E> {
assert !self.in_snapshot();
debug!{"commit()"};
debug!("commit()");
do indent {
let r <- self.try(f);
@ -589,7 +589,7 @@ impl infer_ctxt {
/// Execute `f`, unroll bindings on failure
fn try<T,E>(f: fn() -> result<T,E>) -> result<T,E> {
debug!{"try()"};
debug!("try()");
do indent {
let snapshot = self.start_snapshot();
let r = f();
@ -603,7 +603,7 @@ impl infer_ctxt {
/// Execute `f` then unroll any bindings it creates
fn probe<T,E>(f: fn() -> result<T,E>) -> result<T,E> {
debug!{"probe()"};
debug!("probe()");
do indent {
let snapshot = self.start_snapshot();
let r = self.try(f);

View file

@ -63,8 +63,8 @@ impl infer_ctxt {
}
}
debug!{"assign_tys(anmnt=%?, %s -> %s)",
anmnt, a.to_str(self), b.to_str(self)};
debug!("assign_tys(anmnt=%?, %s -> %s)",
anmnt, a.to_str(self), b.to_str(self));
let _r = indenter();
match (ty::get(a).struct, ty::get(b).struct) {
@ -110,9 +110,9 @@ impl infer_ctxt {
a: ty::t, b: ty::t,
+a_bnd: option<ty::t>, +b_bnd: option<ty::t>) -> ures {
debug!{"assign_tys_or_sub(anmnt=%?, %s -> %s, %s -> %s)",
debug!("assign_tys_or_sub(anmnt=%?, %s -> %s, %s -> %s)",
anmnt, a.to_str(self), b.to_str(self),
a_bnd.to_str(self), b_bnd.to_str(self)};
a_bnd.to_str(self), b_bnd.to_str(self));
let _r = indenter();
fn is_borrowable(v: ty::vstore) -> bool {
@ -167,9 +167,9 @@ impl infer_ctxt {
m: ast::mutability,
r_b: ty::region) -> ures {
debug!{"crosspollinate(anmnt=%?, a=%s, nr_b=%s, r_b=%s)",
debug!("crosspollinate(anmnt=%?, a=%s, nr_b=%s, r_b=%s)",
anmnt, a.to_str(self), nr_b.to_str(self),
r_b.to_str(self)};
r_b.to_str(self));
do indent {
let sub = mk_sub(self, false, anmnt.span);
@ -179,12 +179,12 @@ impl infer_ctxt {
let r_a = self.next_region_var(anmnt.span,
anmnt.borrow_lb);
debug!{"anmnt=%?", anmnt};
debug!("anmnt=%?", anmnt);
do sub.contraregions(r_a, r_b).chain |_r| {
// if successful, add an entry indicating that
// borrowing occurred
debug!{"borrowing expression #%?, scope=%?, m=%?",
anmnt, r_a, m};
debug!("borrowing expression #%?, scope=%?, m=%?",
anmnt, r_a, m);
self.borrowings.push({expr_id: anmnt.expr_id,
span: anmnt.span,
scope: r_a,

View file

@ -99,9 +99,9 @@ fn eq_tys<C: combine>(self: &C, a: ty::t, b: ty::t) -> ures {
}
fn eq_regions<C: combine>(self: &C, a: ty::region, b: ty::region) -> ures {
debug!{"eq_regions(%s, %s)",
debug!("eq_regions(%s, %s)",
a.to_str(self.infcx()),
b.to_str(self.infcx())};
b.to_str(self.infcx()));
let sub = self.sub();
do indent {
self.infcx().try(|| {
@ -140,10 +140,10 @@ fn eq_opt_regions<C:combine>(
// consistently have a region parameter or not have a
// region parameter.
self.infcx().tcx.sess.bug(
fmt!{"substitution a had opt_region %s and \
fmt!("substitution a had opt_region %s and \
b had opt_region %s",
a.to_str(self.infcx()),
b.to_str(self.infcx())});
b.to_str(self.infcx())));
}
}
}
@ -346,10 +346,10 @@ fn super_tys<C:combine>(
(ty::ty_var(_), _) |
(_, ty::ty_var(_)) => {
tcx.sess.bug(
fmt!{"%s: bot and var types should have been handled (%s,%s)",
fmt!("%s: bot and var types should have been handled (%s,%s)",
self.tag(),
a.to_str(self.infcx()),
b.to_str(self.infcx())});
b.to_str(self.infcx())));
}
// Relate integral variables to other types

View file

@ -16,10 +16,10 @@ impl Glb: combine {
fn mts(a: ty::mt, b: ty::mt) -> cres<ty::mt> {
let tcx = self.infcx.tcx;
debug!{"%s.mts(%s, %s)",
debug!("%s.mts(%s, %s)",
self.tag(),
mt_to_str(tcx, a),
mt_to_str(tcx, b)};
mt_to_str(tcx, b));
match (a.mutbl, b.mutbl) {
// If one side or both is mut, then the GLB must use
@ -110,10 +110,10 @@ impl Glb: combine {
}
fn regions(a: ty::region, b: ty::region) -> cres<ty::region> {
debug!{"%s.regions(%?, %?)",
debug!("%s.regions(%?, %?)",
self.tag(),
a.to_str(self.infcx),
b.to_str(self.infcx)};
b.to_str(self.infcx));
do indent {
self.infcx.region_vars.glb_regions(self.span, a, b)

View file

@ -37,7 +37,7 @@ fn intersection(a: int_ty_set, b: int_ty_set) -> int_ty_set {
fn single_type_contained_in(tcx: ty::ctxt, a: int_ty_set) ->
option<ty::t> {
debug!{"single_type_contained_in(a=%s)", uint::to_str(*a, 10u)};
debug!("single_type_contained_in(a=%s)", uint::to_str(*a, 10u));
if *a == INT_TY_SET_i8 { return some(ty::mk_i8(tcx)); }
if *a == INT_TY_SET_u8 { return some(ty::mk_u8(tcx)); }

View file

@ -37,9 +37,9 @@ impl Glb: lattice_ops {
fn lattice_tys<L:lattice_ops combine>(
self: &L, a: ty::t, b: ty::t) -> cres<ty::t> {
debug!{"%s.lattice_tys(%s, %s)", self.tag(),
debug!("%s.lattice_tys(%s, %s)", self.tag(),
a.to_str(self.infcx()),
b.to_str(self.infcx())};
b.to_str(self.infcx()));
if a == b { return ok(a); }
do indent {
match (ty::get(a).struct, ty::get(b).struct) {
@ -84,10 +84,10 @@ fn lattice_vars<L:lattice_ops combine>(
let a_bounds = nde_a.possible_types;
let b_bounds = nde_b.possible_types;
debug!{"%s.lattice_vars(%s=%s <: %s=%s)",
debug!("%s.lattice_vars(%s=%s <: %s=%s)",
self.tag(),
a_vid.to_str(), a_bounds.to_str(self.infcx()),
b_vid.to_str(), b_bounds.to_str(self.infcx())};
b_vid.to_str(), b_bounds.to_str(self.infcx()));
if a_vid == b_vid {
return ok(a_t);
@ -123,21 +123,21 @@ fn lattice_var_and_t<L:lattice_ops combine>(
// The comments in this function are written for LUB, but they
// apply equally well to GLB if you inverse upper/lower/sub/super/etc.
debug!{"%s.lattice_var_and_t(%s=%s <: %s)",
debug!("%s.lattice_var_and_t(%s=%s <: %s)",
self.tag(),
a_id.to_str(), a_bounds.to_str(self.infcx()),
b.to_str(self.infcx())};
b.to_str(self.infcx()));
match self.bnd(a_bounds) {
some(a_bnd) => {
// If a has an upper bound, return the LUB(a.ub, b)
debug!{"bnd=some(%s)", a_bnd.to_str(self.infcx())};
debug!("bnd=some(%s)", a_bnd.to_str(self.infcx()));
return c_ts(a_bnd, b);
}
none => {
// If a does not have an upper bound, make b the upper bound of a
// and then return b.
debug!{"bnd=none"};
debug!("bnd=none");
let a_bounds = self.with_bnd(a_bounds, b);
do bnds(self, a_bounds.lb, a_bounds.ub).then {
self.infcx().set(vb, a_id, root(a_bounds, nde_a.rank));

View file

@ -19,10 +19,10 @@ impl Lub: combine {
fn mts(a: ty::mt, b: ty::mt) -> cres<ty::mt> {
let tcx = self.infcx.tcx;
debug!{"%s.mts(%s, %s)",
debug!("%s.mts(%s, %s)",
self.tag(),
mt_to_str(tcx, a),
mt_to_str(tcx, b)};
mt_to_str(tcx, b));
let m = if a.mutbl == b.mutbl {
a.mutbl
@ -93,10 +93,10 @@ impl Lub: combine {
}
fn regions(a: ty::region, b: ty::region) -> cres<ty::region> {
debug!{"%s.regions(%?, %?)",
debug!("%s.regions(%?, %?)",
self.tag(),
a.to_str(self.infcx),
b.to_str(self.infcx)};
b.to_str(self.infcx));
do indent {
self.infcx.region_vars.lub_regions(self.span, a, b)

View file

@ -77,9 +77,9 @@ impl resolve_state {
fn resolve_type_chk(typ: ty::t) -> fres<ty::t> {
self.err = none;
debug!{"Resolving %s (modes=%x)",
debug!("Resolving %s (modes=%x)",
ty_to_str(self.infcx.tcx, typ),
self.modes};
self.modes);
// n.b. This is a hokey mess because the current fold doesn't
// allow us to pass back errors in any useful way.
@ -89,9 +89,9 @@ impl resolve_state {
assert vec::is_empty(self.v_seen);
match self.err {
none => {
debug!{"Resolved to %s (modes=%x)",
debug!("Resolved to %s (modes=%x)",
ty_to_str(self.infcx.tcx, rty),
self.modes};
self.modes);
return ok(rty);
}
some(e) => return err(e)
@ -108,7 +108,7 @@ impl resolve_state {
}
fn resolve_type(typ: ty::t) -> ty::t {
debug!{"resolve_type(%s)", typ.to_str(self.infcx)};
debug!("resolve_type(%s)", typ.to_str(self.infcx));
indent(fn&() -> ty::t {
if !ty::type_needs_infer(typ) { return typ; }
@ -137,7 +137,7 @@ impl resolve_state {
}
fn resolve_nested_tvar(typ: ty::t) -> ty::t {
debug!{"Resolve_if_deep(%s)", typ.to_str(self.infcx)};
debug!("Resolve_if_deep(%s)", typ.to_str(self.infcx));
if !self.should(resolve_nested_tvar) {
typ
} else {
@ -146,7 +146,7 @@ impl resolve_state {
}
fn resolve_region(orig: ty::region) -> ty::region {
debug!{"Resolve_region(%s)", orig.to_str(self.infcx)};
debug!("Resolve_region(%s)", orig.to_str(self.infcx));
match orig {
ty::re_var(rid) => self.resolve_region_var(rid),
_ => orig

View file

@ -28,10 +28,10 @@ impl Sub: combine {
}
fn regions(a: ty::region, b: ty::region) -> cres<ty::region> {
debug!{"%s.regions(%s, %s)",
debug!("%s.regions(%s, %s)",
self.tag(),
a.to_str(self.infcx),
b.to_str(self.infcx)};
b.to_str(self.infcx));
do indent {
match self.infcx.region_vars.make_subregion(self.span, a, b) {
ok(()) => ok(a),
@ -41,7 +41,7 @@ impl Sub: combine {
}
fn mts(a: ty::mt, b: ty::mt) -> cres<ty::mt> {
debug!{"mts(%s <: %s)", a.to_str(self.infcx), b.to_str(self.infcx)};
debug!("mts(%s <: %s)", a.to_str(self.infcx), b.to_str(self.infcx));
if a.mutbl != b.mutbl && b.mutbl != m_const {
return err(ty::terr_mutability);
@ -97,8 +97,8 @@ impl Sub: combine {
}
fn tys(a: ty::t, b: ty::t) -> cres<ty::t> {
debug!{"%s.tys(%s, %s)", self.tag(),
a.to_str(self.infcx), b.to_str(self.infcx)};
debug!("%s.tys(%s, %s)", self.tag(),
a.to_str(self.infcx), b.to_str(self.infcx));
if a == b { return ok(a); }
do indent {
match (ty::get(a).struct, ty::get(b).struct) {
@ -143,9 +143,9 @@ impl Sub: combine {
// NDM--we should not be used dummy_sp() here, but
// rather passing in the span or something like that.
let rvar = self.infcx.next_region_var_nb(dummy_sp());
debug!{"Bound region %s maps to %s",
debug!("Bound region %s maps to %s",
bound_region_to_str(self.infcx.tcx, br),
region_to_str(self.infcx.tcx, rvar)};
region_to_str(self.infcx.tcx, rvar));
rvar
}
};

View file

@ -34,9 +34,9 @@ impl<V:copy to_str> bound<V>: to_str {
impl<T:copy to_str> bounds<T>: to_str {
fn to_str(cx: infer_ctxt) -> ~str {
fmt!{"{%s <: %s}",
fmt!("{%s <: %s}",
self.lb.to_str(cx),
self.ub.to_str(cx)}
self.ub.to_str(cx))
}
}
@ -51,9 +51,9 @@ impl int_ty_set: to_str {
impl<V:copy vid, T:copy to_str> var_value<V, T>: to_str {
fn to_str(cx: infer_ctxt) -> ~str {
match self {
redirect(vid) => fmt!{"redirect(%s)", vid.to_str()},
root(pt, rk) => fmt!{"root(%s, %s)", pt.to_str(cx),
uint::to_str(rk, 10u)}
redirect(vid) => fmt!("redirect(%s)", vid.to_str()),
root(pt, rk) => fmt!("root(%s, %s)", pt.to_str(cx),
uint::to_str(rk, 10u))
}
}
}

View file

@ -25,7 +25,7 @@ impl infer_ctxt {
let vid_u = vid.to_uint();
match vb.vals.find(vid_u) {
none => {
self.tcx.sess.bug(fmt!{"failed lookup of vid `%u`", vid_u});
self.tcx.sess.bug(fmt!("failed lookup of vid `%u`", vid_u));
}
some(var_val) => {
match var_val {
@ -53,8 +53,8 @@ impl infer_ctxt {
vec::push(vb.bindings, (vid, old_v));
vb.vals.insert(vid.to_uint(), new_v);
debug!{"Updating variable %s from %s to %s",
vid.to_str(), old_v.to_str(self), new_v.to_str(self)};
debug!("Updating variable %s from %s to %s",
vid.to_str(), old_v.to_str(self), new_v.to_str(self));
}
}
@ -87,15 +87,15 @@ fn merge_bnds<C: combine>(
let _r = indenter();
do merge_bnd(self, a.ub, b.ub, glb).chain |ub| {
debug!{"glb of ubs %s and %s is %s",
debug!("glb of ubs %s and %s is %s",
a.ub.to_str(self.infcx()),
b.ub.to_str(self.infcx()),
ub.to_str(self.infcx())};
ub.to_str(self.infcx()));
do merge_bnd(self, a.lb, b.lb, lub).chain |lb| {
debug!{"lub of lbs %s and %s is %s",
debug!("lub of lbs %s and %s is %s",
a.lb.to_str(self.infcx()),
b.lb.to_str(self.infcx()),
lb.to_str(self.infcx())};
lb.to_str(self.infcx()));
ok({lb: lb, ub: ub})
}
}
@ -134,10 +134,10 @@ fn set_var_to_merged_bounds<C: combine>(
// A \ / A
// B
debug!{"merge(%s,%s,%s)",
debug!("merge(%s,%s,%s)",
v_id.to_str(),
a.to_str(self.infcx()),
b.to_str(self.infcx())};
b.to_str(self.infcx()));
// First, relate the lower/upper bounds of A and B.
// Note that these relations *must* hold for us to
@ -153,9 +153,9 @@ fn set_var_to_merged_bounds<C: combine>(
do merge_bnd(self, a.lb, b.lb,
|x, y| self.lub().tys(x, y)).chain |lb| {
let bounds = {lb: lb, ub: ub};
debug!{"merge(%s): bounds=%s",
debug!("merge(%s): bounds=%s",
v_id.to_str(),
bounds.to_str(self.infcx())};
bounds.to_str(self.infcx()));
// the new bounds must themselves
// be relatable:
@ -186,9 +186,9 @@ fn var_sub_var<C: combine>(self: &C,
let a_bounds = nde_a.possible_types;
let b_bounds = nde_b.possible_types;
debug!{"vars(%s=%s <: %s=%s)",
debug!("vars(%s=%s <: %s=%s)",
a_id.to_str(), a_bounds.to_str(self.infcx()),
b_id.to_str(), b_bounds.to_str(self.infcx())};
b_id.to_str(), b_bounds.to_str(self.infcx()));
if a_id == b_id { return uok(); }
@ -214,20 +214,20 @@ fn var_sub_var<C: combine>(self: &C,
// Make the node with greater rank the parent of the node with
// smaller rank.
if nde_a.rank > nde_b.rank {
debug!{"vars(): a has smaller rank"};
debug!("vars(): a has smaller rank");
// a has greater rank, so a should become b's parent,
// i.e., b should redirect to a.
self.infcx().set(vb, b_id, redirect(a_id));
set_var_to_merged_bounds(
self, a_id, a_bounds, b_bounds, nde_a.rank)
} else if nde_a.rank < nde_b.rank {
debug!{"vars(): b has smaller rank"};
debug!("vars(): b has smaller rank");
// b has greater rank, so a should redirect to b.
self.infcx().set(vb, a_id, redirect(b_id));
set_var_to_merged_bounds(
self, b_id, a_bounds, b_bounds, nde_b.rank)
} else {
debug!{"vars(): a and b have equal rank"};
debug!("vars(): a and b have equal rank");
assert nde_a.rank == nde_b.rank;
// If equal, just redirect one to the other and increment
// the other's rank. We choose arbitrarily to redirect b
@ -247,10 +247,10 @@ fn var_sub_t<C: combine>(self: &C, a_id: ty::tv_vid, b: ty::t) -> ures {
let a_id = nde_a.root;
let a_bounds = nde_a.possible_types;
debug!{"var_sub_t(%s=%s <: %s)",
debug!("var_sub_t(%s=%s <: %s)",
a_id.to_str(),
a_bounds.to_str(self.infcx()),
b.to_str(self.infcx())};
b.to_str(self.infcx()));
let b_bounds = {lb: none, ub: some(b)};
set_var_to_merged_bounds(self, a_id, a_bounds, b_bounds, nde_a.rank)
}
@ -264,17 +264,17 @@ fn t_sub_var<C: combine>(self: &C, a: ty::t, b_id: ty::tv_vid) -> ures {
let b_id = nde_b.root;
let b_bounds = nde_b.possible_types;
debug!{"t_sub_var(%s <: %s=%s)",
debug!("t_sub_var(%s <: %s=%s)",
a.to_str(self.infcx()),
b_id.to_str(),
b_bounds.to_str(self.infcx())};
b_bounds.to_str(self.infcx()));
set_var_to_merged_bounds(self, b_id, a_bounds, b_bounds, nde_b.rank)
}
fn bnds<C: combine>(
self: &C, a: bound<ty::t>, b: bound<ty::t>) -> ures {
debug!{"bnds(%s <: %s)", a.to_str(self.infcx()), b.to_str(self.infcx())};
debug!("bnds(%s <: %s)", a.to_str(self.infcx()), b.to_str(self.infcx()));
do indent {
match (a, b) {
(none, none) |
@ -316,18 +316,18 @@ impl infer_ctxt {
// Rank optimization
if nde_a.rank > nde_b.rank {
debug!{"vars_integral(): a has smaller rank"};
debug!("vars_integral(): a has smaller rank");
// a has greater rank, so a should become b's parent,
// i.e., b should redirect to a.
self.set(vb, a_id, root(intersection, nde_a.rank));
self.set(vb, b_id, redirect(a_id));
} else if nde_a.rank < nde_b.rank {
debug!{"vars_integral(): b has smaller rank"};
debug!("vars_integral(): b has smaller rank");
// b has greater rank, so a should redirect to b.
self.set(vb, b_id, root(intersection, nde_b.rank));
self.set(vb, a_id, redirect(b_id));
} else {
debug!{"vars_integral(): a and b have equal rank"};
debug!("vars_integral(): a and b have equal rank");
assert nde_a.rank == nde_b.rank;
// If equal, just redirect one to the other and increment
// the other's rank. We choose arbitrarily to redirect b

View file

@ -8,20 +8,20 @@ import syntax::print;
fn indent<R>(op: fn() -> R) -> R {
// Use in conjunction with the log post-processor like `src/etc/indenter`
// to make debug output more readable.
debug!{">>"};
debug!(">>");
let r <- op();
debug!{"<< (Result = %?)", r};
debug!("<< (Result = %?)", r);
return r;
}
struct _indenter {
let _i: ();
new(_i: ()) { self._i = (); }
drop { debug!{"<<"}; }
drop { debug!("<<"); }
}
fn indenter() -> _indenter {
debug!{">>"};
debug!(">>");
_indenter(())
}

View file

@ -100,7 +100,7 @@ fn explain_region_and_span(cx: ctxt, region: ty::region)
-> (~str, option<span>)
{
let lo = codemap::lookup_char_pos_adj(cx.sess.codemap, span.lo);
(fmt!{"the %s at %u:%u", heading, lo.line, lo.col}, some(span))
(fmt!("the %s at %u:%u", heading, lo.line, lo.col), some(span))
}
}
@ -118,7 +118,7 @@ fn bound_region_to_str(cx: ctxt, br: bound_region) -> ~str {
// does not fail
br_cap_avoid(id, br) => {
if cx.sess.ppregions() {
fmt!{"br_cap_avoid(%?, %s)", id, bound_region_to_str(cx, *br)}
fmt!("br_cap_avoid(%?, %s)", id, bound_region_to_str(cx, *br))
} else {
bound_region_to_str(cx, *br)
}
@ -129,40 +129,40 @@ fn bound_region_to_str(cx: ctxt, br: bound_region) -> ~str {
fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str {
match cx.items.find(node_id) {
some(ast_map::node_block(blk)) => {
fmt!{"<block at %s>",
codemap::span_to_str(blk.span, cx.sess.codemap)}
fmt!("<block at %s>",
codemap::span_to_str(blk.span, cx.sess.codemap))
}
some(ast_map::node_expr(expr)) => {
match expr.node {
ast::expr_call(*) => {
fmt!{"<call at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap)}
fmt!("<call at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap))
}
ast::expr_match(*) => {
fmt!{"<alt at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap)}
fmt!("<alt at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap))
}
ast::expr_assign_op(*) |
ast::expr_field(*) |
ast::expr_unary(*) |
ast::expr_binary(*) |
ast::expr_index(*) => {
fmt!{"<method at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap)}
fmt!("<method at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap))
}
_ => {
fmt!{"<expression at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap)}
fmt!("<expression at %s>",
codemap::span_to_str(expr.span, cx.sess.codemap))
}
}
}
none => {
fmt!{"<unknown-%d>", node_id}
fmt!("<unknown-%d>", node_id)
}
_ => { cx.sess.bug(
fmt!{"re_scope refers to %s",
fmt!("re_scope refers to %s",
ast_map::node_id_to_str(cx.items, node_id,
cx.sess.parse_sess.interner)}) }
cx.sess.parse_sess.interner))) }
}
}
@ -198,7 +198,7 @@ fn mt_to_str(cx: ctxt, m: mt) -> ~str {
fn vstore_to_str(cx: ctxt, vs: ty::vstore) -> ~str {
match vs {
ty::vstore_fixed(n) => fmt!{"%u", n},
ty::vstore_fixed(n) => fmt!("%u", n),
ty::vstore_uniq => ~"~",
ty::vstore_box => ~"@",
ty::vstore_slice(r) => region_to_str(cx, r)
@ -208,9 +208,9 @@ fn vstore_to_str(cx: ctxt, vs: ty::vstore) -> ~str {
fn vstore_ty_to_str(cx: ctxt, ty: ~str, vs: ty::vstore) -> ~str {
match vs {
ty::vstore_fixed(_) => {
fmt!{"%s/%s", ty, vstore_to_str(cx, vs)}
fmt!("%s/%s", ty, vstore_to_str(cx, vs))
}
_ => fmt!{"%s%s", vstore_to_str(cx, vs), ty}
_ => fmt!("%s%s", vstore_to_str(cx, vs), ty)
}
}
@ -347,7 +347,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str {
vstore_ty_to_str(cx, result, vs)
}
ty_evec(mt, vs) => {
vstore_ty_to_str(cx, fmt!{"[%s]", mt_to_str(cx, mt)}, vs)
vstore_ty_to_str(cx, fmt!("[%s]", mt_to_str(cx, mt)), vs)
}
ty_estr(vs) => vstore_ty_to_str(cx, ~"str", vs),
ty_opaque_box => ~"@?",
@ -365,15 +365,15 @@ fn parameterized(cx: ctxt,
let r_str = match self_r {
none => ~"",
some(r) => {
fmt!{"/%s", region_to_str(cx, r)}
fmt!("/%s", region_to_str(cx, r))
}
};
if vec::len(tps) > 0u {
let strs = vec::map(tps, |t| ty_to_str(cx, t) );
fmt!{"%s%s<%s>", base, r_str, str::connect(strs, ~",")}
fmt!("%s%s<%s>", base, r_str, str::connect(strs, ~","))
} else {
fmt!{"%s%s", base, r_str}
fmt!("%s%s", base, r_str)
}
}