Merge remote-tracking branch 'original/incoming' into incoming

Conflicts:
	src/libstd/json.rs
	src/libstd/sort.rs
This commit is contained in:
Simon BD 2012-10-03 21:47:09 -05:00
commit efcd2385ea
378 changed files with 11486 additions and 8500 deletions

View file

@ -38,6 +38,10 @@ macro_rules! interner_key (
(-3 as uint, 0u)))
)
// FIXME(#3534): Replace with the struct-based newtype when it's been
// implemented.
struct ident { repr: uint }
fn serialize_ident<S: Serializer>(s: S, i: ident) {
let intr = match unsafe{
task::local_data::local_data_get(interner_key!())
@ -59,7 +63,16 @@ fn deserialize_ident<D: Deserializer>(d: D) -> ident {
(*intr).intern(@d.read_str())
}
type ident = token::str_num;
impl ident: cmp::Eq {
pure fn eq(other: &ident) -> bool { self.repr == other.repr }
pure fn ne(other: &ident) -> bool { !self.eq(other) }
}
impl ident: to_bytes::IterBytes {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
self.repr.iter_bytes(lsb0, f)
}
}
// Functions may or may not have names.
#[auto_serialize]
@ -315,7 +328,7 @@ enum binding_mode {
}
impl binding_mode : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
match self {
bind_by_value => 0u8.iter_bytes(lsb0, f),
@ -389,7 +402,7 @@ enum pat_ {
enum mutability { m_mutbl, m_imm, m_const, }
impl mutability : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -528,7 +541,7 @@ enum inferable<T> {
}
impl<T: to_bytes::IterBytes> inferable<T> : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
match self {
expl(ref t) =>
to_bytes::iter_bytes_2(&0u8, t, lsb0, f),
@ -564,7 +577,7 @@ impl<T:cmp::Eq> inferable<T> : cmp::Eq {
enum rmode { by_ref, by_val, by_mutbl_ref, by_move, by_copy }
impl rmode : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -924,7 +937,7 @@ enum trait_method {
enum int_ty { ty_i, ty_char, ty_i8, ty_i16, ty_i32, ty_i64, }
impl int_ty : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -953,7 +966,7 @@ impl int_ty : cmp::Eq {
enum uint_ty { ty_u, ty_u8, ty_u16, ty_u32, ty_u64, }
impl uint_ty : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -980,7 +993,7 @@ impl uint_ty : cmp::Eq {
enum float_ty { ty_f, ty_f32, ty_f64, }
impl float_ty : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -1081,15 +1094,15 @@ enum ty_ {
// since we only care about this for normalizing them to "real" types.
impl ty : cmp::Eq {
pure fn eq(other: &ty) -> bool {
ptr::addr_of(self) == ptr::addr_of((*other))
ptr::addr_of(&self) == ptr::addr_of(&(*other))
}
pure fn ne(other: &ty) -> bool {
ptr::addr_of(self) != ptr::addr_of((*other))
ptr::addr_of(&self) != ptr::addr_of(&(*other))
}
}
impl ty : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
to_bytes::iter_bytes_2(&self.span.lo, &self.span.hi, lsb0, f);
}
}
@ -1113,7 +1126,7 @@ enum purity {
}
impl purity : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -1133,7 +1146,7 @@ enum ret_style {
}
impl ret_style : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -1430,7 +1443,7 @@ enum item_ {
enum class_mutability { class_mutable, class_immutable }
impl class_mutability : to_bytes::IterBytes {
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as u8).iter_bytes(lsb0, f)
}
}
@ -1472,7 +1485,8 @@ type foreign_item =
attrs: ~[attribute],
node: foreign_item_,
id: node_id,
span: span};
span: span,
vis: visibility};
#[auto_serialize]
enum foreign_item_ {

View file

@ -34,7 +34,7 @@ impl path_elt : cmp::Eq {
type path = ~[path_elt];
/* FIXMEs that say "bad" are as per #2543 */
fn path_to_str_with_sep(p: path, sep: ~str, itr: ident_interner) -> ~str {
fn path_to_str_with_sep(p: path, sep: ~str, itr: @ident_interner) -> ~str {
let strs = do vec::map(p) |e| {
match *e {
path_mod(s) => *itr.get(s),
@ -44,7 +44,7 @@ fn path_to_str_with_sep(p: path, sep: ~str, itr: ident_interner) -> ~str {
str::connect(strs, sep)
}
fn path_ident_to_str(p: path, i: ident, itr: ident_interner) -> ~str {
fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str {
if vec::is_empty(p) {
//FIXME /* FIXME (#2543) */ copy *i
*itr.get(i)
@ -53,7 +53,7 @@ fn path_ident_to_str(p: path, i: ident, itr: ident_interner) -> ~str {
}
}
fn path_to_str(p: path, itr: ident_interner) -> ~str {
fn path_to_str(p: path, itr: @ident_interner) -> ~str {
path_to_str_with_sep(p, ~"::", itr)
}
@ -273,12 +273,12 @@ fn map_item(i: @item, cx: ctx, v: vt) {
}
match i.node {
item_mod(_) | item_foreign_mod(_) => {
vec::push(cx.path, path_mod(i.ident));
cx.path.push(path_mod(i.ident));
}
_ => vec::push(cx.path, path_name(i.ident))
_ => cx.path.push(path_name(i.ident))
}
visit::visit_item(i, cx, v);
vec::pop(cx.path);
cx.path.pop();
}
fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
@ -326,7 +326,7 @@ fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
visit::visit_stmt(stmt, cx, v);
}
fn node_id_to_str(map: map, id: node_id, itr: ident_interner) -> ~str {
fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
match map.find(id) {
None => {
fmt!("unknown node (id=%d)", id)

View file

@ -23,7 +23,7 @@ pure fn dummy_sp() -> span { return mk_sp(0u, 0u); }
pure fn path_name_i(idents: ~[ident], intr: token::ident_interner) -> ~str {
pure fn path_name_i(idents: ~[ident], intr: @token::ident_interner) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
str::connect(idents.map(|i| *intr.get(*i)), ~"::")
}
@ -254,7 +254,7 @@ pure fn is_call_expr(e: @expr) -> bool {
// This makes def_id hashable
impl def_id : core::to_bytes::IterBytes {
#[inline(always)]
pure fn iter_bytes(lsb0: bool, f: core::to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: core::to_bytes::Cb) {
core::to_bytes::iter_bytes_2(&self.crate, &self.node, lsb0, f);
}
}
@ -275,14 +275,14 @@ fn ident_to_path(s: span, +i: ident) -> @path {
rp: None, types: ~[]}
}
pure fn is_unguarded(&&a: arm) -> bool {
pure fn is_unguarded(a: &arm) -> bool {
match a.guard {
None => true,
_ => false
}
}
pure fn unguarded_pat(a: arm) -> Option<~[@pat]> {
pure fn unguarded_pat(a: &arm) -> Option<~[@pat]> {
if is_unguarded(a) { Some(/* FIXME (#2543) */ copy a.pats) } else { None }
}
@ -313,8 +313,8 @@ fn split_trait_methods(trait_methods: ~[trait_method])
let mut reqd = ~[], provd = ~[];
for trait_methods.each |trt_method| {
match *trt_method {
required(tm) => vec::push(reqd, tm),
provided(m) => vec::push(provd, m)
required(tm) => reqd.push(tm),
provided(m) => provd.push(m)
}
};
(reqd, provd)
@ -398,10 +398,8 @@ fn operator_prec(op: ast::binop) -> uint {
fn dtor_dec() -> fn_decl {
let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
// dtor has one argument, of type ()
{inputs: ~[{mode: ast::expl(ast::by_ref),
ty: nil_t, ident: parse::token::special_idents::underscore,
id: 0}],
// dtor has no args
{inputs: ~[],
output: nil_t, cf: return_val}
}

View file

@ -91,7 +91,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
// Get the meta_items from inside a vector of attributes
fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
let mut mitems = ~[];
for attrs.each |a| { vec::push(mitems, attr_meta(*a)); }
for attrs.each |a| { mitems.push(attr_meta(*a)); }
return mitems;
}
@ -163,9 +163,9 @@ fn get_name_value_str_pair(item: @ast::meta_item) -> Option<(~str, ~str)> {
fn find_attrs_by_name(attrs: ~[ast::attribute], name: ~str) ->
~[ast::attribute] {
let filter = (
fn@(a: ast::attribute) -> Option<ast::attribute> {
if get_attr_name(a) == name {
option::Some(a)
fn@(a: &ast::attribute) -> Option<ast::attribute> {
if get_attr_name(*a) == name {
option::Some(*a)
} else { option::None }
}
);
@ -175,9 +175,9 @@ fn find_attrs_by_name(attrs: ~[ast::attribute], name: ~str) ->
/// Searcha list of meta items and return only those with a specific name
fn find_meta_items_by_name(metas: ~[@ast::meta_item], name: ~str) ->
~[@ast::meta_item] {
let filter = fn@(&&m: @ast::meta_item) -> Option<@ast::meta_item> {
if get_meta_item_name(m) == name {
option::Some(m)
let filter = fn@(m: &@ast::meta_item) -> Option<@ast::meta_item> {
if get_meta_item_name(*m) == name {
option::Some(*m)
} else { option::None }
};
return vec::filter_map(metas, filter);
@ -289,8 +289,8 @@ fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) ->
~[@ast::meta_item] {
return vec::filter_map(items, |item| {
if get_meta_item_name(item) != name {
option::Some(/* FIXME (#2543) */ copy item)
if get_meta_item_name(*item) != name {
option::Some(/* FIXME (#2543) */ copy *item)
} else {
option::None
}

View file

@ -84,7 +84,7 @@ fn mk_substr_filename(cm: codemap, sp: span) -> ~str
}
fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
vec::push(file.lines, {ch: chpos, byte: byte_pos + file.start_pos.byte});
file.lines.push({ch: chpos, byte: byte_pos + file.start_pos.byte});
}
type lookup_fn = pure fn(file_pos) -> uint;
@ -204,7 +204,7 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
let hi = lookup_char_pos(cm, sp.hi);
let mut lines = ~[];
for uint::range(lo.line - 1u, hi.line as uint) |i| {
vec::push(lines, i);
lines.push(i);
};
return @{file: lo.file, lines: lines};
}

View file

@ -10,30 +10,30 @@ export ice_msg;
export expect;
type emitter = fn@(cmsp: Option<(codemap::codemap, span)>,
msg: ~str, lvl: level);
msg: &str, lvl: level);
trait span_handler {
fn span_fatal(sp: span, msg: ~str) -> !;
fn span_err(sp: span, msg: ~str);
fn span_warn(sp: span, msg: ~str);
fn span_note(sp: span, msg: ~str);
fn span_bug(sp: span, msg: ~str) -> !;
fn span_unimpl(sp: span, msg: ~str) -> !;
fn span_fatal(sp: span, msg: &str) -> !;
fn span_err(sp: span, msg: &str);
fn span_warn(sp: span, msg: &str);
fn span_note(sp: span, msg: &str);
fn span_bug(sp: span, msg: &str) -> !;
fn span_unimpl(sp: span, msg: &str) -> !;
fn handler() -> handler;
}
trait handler {
fn fatal(msg: ~str) -> !;
fn err(msg: ~str);
fn fatal(msg: &str) -> !;
fn err(msg: &str);
fn bump_err_count();
fn has_errors() -> bool;
fn abort_if_errors();
fn warn(msg: ~str);
fn note(msg: ~str);
fn bug(msg: ~str) -> !;
fn unimpl(msg: ~str) -> !;
fn emit(cmsp: Option<(codemap::codemap, span)>, msg: ~str, lvl: level);
fn warn(msg: &str);
fn note(msg: &str);
fn bug(msg: &str) -> !;
fn unimpl(msg: &str) -> !;
fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level);
}
type handler_t = @{
@ -47,24 +47,24 @@ type codemap_t = @{
};
impl codemap_t: span_handler {
fn span_fatal(sp: span, msg: ~str) -> ! {
fn span_fatal(sp: span, msg: &str) -> ! {
self.handler.emit(Some((self.cm, sp)), msg, fatal);
fail;
}
fn span_err(sp: span, msg: ~str) {
fn span_err(sp: span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, error);
self.handler.bump_err_count();
}
fn span_warn(sp: span, msg: ~str) {
fn span_warn(sp: span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, warning);
}
fn span_note(sp: span, msg: ~str) {
fn span_note(sp: span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, note);
}
fn span_bug(sp: span, msg: ~str) -> ! {
fn span_bug(sp: span, msg: &str) -> ! {
self.span_fatal(sp, ice_msg(msg));
}
fn span_unimpl(sp: span, msg: ~str) -> ! {
fn span_unimpl(sp: span, msg: &str) -> ! {
self.span_bug(sp, ~"unimplemented " + msg);
}
fn handler() -> handler {
@ -73,11 +73,11 @@ impl codemap_t: span_handler {
}
impl handler_t: handler {
fn fatal(msg: ~str) -> ! {
fn fatal(msg: &str) -> ! {
self.emit(None, msg, fatal);
fail;
}
fn err(msg: ~str) {
fn err(msg: &str) {
self.emit(None, msg, error);
self.bump_err_count();
}
@ -97,22 +97,22 @@ impl handler_t: handler {
}
self.fatal(s);
}
fn warn(msg: ~str) {
fn warn(msg: &str) {
self.emit(None, msg, warning);
}
fn note(msg: ~str) {
fn note(msg: &str) {
self.emit(None, msg, note);
}
fn bug(msg: ~str) -> ! {
fn bug(msg: &str) -> ! {
self.fatal(ice_msg(msg));
}
fn unimpl(msg: ~str) -> ! { self.bug(~"unimplemented " + msg); }
fn emit(cmsp: Option<(codemap::codemap, span)>, msg: ~str, lvl: level) {
fn unimpl(msg: &str) -> ! { self.bug(~"unimplemented " + msg); }
fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level) {
self.emit(cmsp, msg, lvl);
}
}
fn ice_msg(msg: ~str) -> ~str {
fn ice_msg(msg: &str) -> ~str {
fmt!("internal compiler error: %s", msg)
}
@ -126,17 +126,19 @@ fn mk_handler(emitter: Option<emitter>) -> handler {
Some(e) => e,
None => {
let f = fn@(cmsp: Option<(codemap::codemap, span)>,
msg: ~str, t: level) {
msg: &str, t: level) {
emit(cmsp, msg, t);
};
f
}
};
@{
mut err_count: 0u,
let x: handler_t = @{
mut err_count: 0,
emit: emit
} as handler
};
x as handler
}
enum level {
@ -171,7 +173,7 @@ fn diagnosticcolor(lvl: level) -> u8 {
}
}
fn print_diagnostic(topic: ~str, lvl: level, msg: ~str) {
fn print_diagnostic(topic: ~str, lvl: level, msg: &str) {
let use_color = term::color_supported() &&
io::stderr().get_type() == io::Screen;
if str::is_not_empty(topic) {
@ -188,7 +190,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: ~str) {
}
fn emit(cmsp: Option<(codemap::codemap, span)>,
msg: ~str, lvl: level) {
msg: &str, lvl: level) {
match cmsp {
Some((cm, sp)) => {
let sp = codemap::adjust_span(cm,sp);
@ -262,7 +264,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
do option::iter(&sp.expn_info) |ei| {
let ss = option::map_default(&ei.callie.span, @~"",
|span| @codemap::span_to_str(span, cm));
|span| @codemap::span_to_str(*span, cm));
print_diagnostic(*ss, note,
fmt!("in expansion of #%s", ei.callie.name));
let ss = codemap::span_to_str(ei.call_site, cm);

View file

@ -90,8 +90,8 @@ fn expand(cx: ext_ctxt,
span: span,
_mitem: ast::meta_item,
in_items: ~[@ast::item]) -> ~[@ast::item] {
fn not_auto_serialize(a: ast::attribute) -> bool {
attr::get_attr_name(a) != ~"auto_serialize"
fn not_auto_serialize(a: &ast::attribute) -> bool {
attr::get_attr_name(*a) != ~"auto_serialize"
}
fn filter_attrs(item: @ast::item) -> @ast::item {
@ -102,12 +102,12 @@ fn expand(cx: ext_ctxt,
do vec::flat_map(in_items) |in_item| {
match in_item.node {
ast::item_ty(ty, tps) => {
vec::append(~[filter_attrs(in_item)],
vec::append(~[filter_attrs(*in_item)],
ty_fns(cx, in_item.ident, ty, tps))
}
ast::item_enum(enum_definition, tps) => {
vec::append(~[filter_attrs(in_item)],
vec::append(~[filter_attrs(*in_item)],
enum_fns(cx, in_item.ident,
in_item.span, enum_definition.variants, tps))
}
@ -116,7 +116,7 @@ fn expand(cx: ext_ctxt,
cx.span_err(span, ~"#[auto_serialize] can only be \
applied to type and enum \
definitions");
~[in_item]
~[*in_item]
}
}
}
@ -566,7 +566,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
tp_inputs);
let tps_map = map::HashMap();
do vec::iter2(tps, tp_inputs) |tp, arg| {
for vec::each2(tps, tp_inputs) |tp, arg| {
let arg_ident = arg.ident;
tps_map.insert(
tp.ident,
@ -773,7 +773,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
tp_inputs);
let tps_map = map::HashMap();
do vec::iter2(tps, tp_inputs) |tp, arg| {
for vec::each2(tps, tp_inputs) |tp, arg| {
let arg_ident = arg.ident;
tps_map.insert(
tp.ident,

File diff suppressed because it is too large Load diff

View file

@ -73,7 +73,7 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
fn builtin_item_tt(f: syntax_expander_tt_item_) -> syntax_extension {
item_tt({expander: f, span: None})
}
let syntax_expanders = HashMap::<~str,syntax_extension>();
let syntax_expanders = HashMap();
syntax_expanders.insert(~"macro",
macro_defining(ext::simplext::add_new_extension));
syntax_expanders.insert(~"macro_rules",
@ -82,6 +82,12 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
syntax_expanders.insert(~"fmt", builtin(ext::fmt::expand_syntax_ext));
syntax_expanders.insert(~"auto_serialize",
item_decorator(ext::auto_serialize::expand));
syntax_expanders.insert(
~"auto_serialize2",
item_decorator(ext::auto_serialize2::expand_auto_serialize));
syntax_expanders.insert(
~"auto_deserialize2",
item_decorator(ext::auto_serialize2::expand_auto_deserialize));
syntax_expanders.insert(~"env", builtin(ext::env::expand_syntax_ext));
syntax_expanders.insert(~"concat_idents",
builtin(ext::concat_idents::expand_syntax_ext));
@ -131,12 +137,12 @@ trait ext_ctxt {
fn mod_path() -> ~[ast::ident];
fn bt_push(ei: codemap::expn_info_);
fn bt_pop();
fn span_fatal(sp: span, msg: ~str) -> !;
fn span_err(sp: span, msg: ~str);
fn span_warn(sp: span, msg: ~str);
fn span_unimpl(sp: span, msg: ~str) -> !;
fn span_bug(sp: span, msg: ~str) -> !;
fn bug(msg: ~str) -> !;
fn span_fatal(sp: span, msg: &str) -> !;
fn span_err(sp: span, msg: &str);
fn span_warn(sp: span, msg: &str);
fn span_unimpl(sp: span, msg: &str) -> !;
fn span_bug(sp: span, msg: &str) -> !;
fn bug(msg: &str) -> !;
fn next_id() -> ast::node_id;
pure fn trace_macros() -> bool;
fn set_trace_macros(x: bool);
@ -158,8 +164,8 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
fn cfg() -> ast::crate_cfg { self.cfg }
fn print_backtrace() { }
fn backtrace() -> expn_info { self.backtrace }
fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); }
fn mod_pop() { vec::pop(self.mod_path); }
fn mod_push(i: ast::ident) { self.mod_path.push(i); }
fn mod_pop() { self.mod_path.pop(); }
fn mod_path() -> ~[ast::ident] { return self.mod_path; }
fn bt_push(ei: codemap::expn_info_) {
match ei {
@ -180,27 +186,27 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
_ => self.bug(~"tried to pop without a push")
}
}
fn span_fatal(sp: span, msg: ~str) -> ! {
fn span_fatal(sp: span, msg: &str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_fatal(sp, msg);
}
fn span_err(sp: span, msg: ~str) {
fn span_err(sp: span, msg: &str) {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_err(sp, msg);
}
fn span_warn(sp: span, msg: ~str) {
fn span_warn(sp: span, msg: &str) {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_warn(sp, msg);
}
fn span_unimpl(sp: span, msg: ~str) -> ! {
fn span_unimpl(sp: span, msg: &str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
}
fn span_bug(sp: span, msg: ~str) -> ! {
fn span_bug(sp: span, msg: &str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_bug(sp, msg);
}
fn bug(msg: ~str) -> ! {
fn bug(msg: &str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.handler().bug(msg);
}

View file

@ -50,6 +50,10 @@ fn mk_access(cx: ext_ctxt, sp: span, p: ~[ast::ident], m: ast::ident)
let pathexpr = mk_path(cx, sp, p);
return mk_access_(cx, sp, pathexpr, m);
}
fn mk_addr_of(cx: ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
return mk_expr(cx, sp, ast::expr_addr_of(ast::m_imm, e));
}
fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr,
args: ~[@ast::expr]) -> @ast::expr {
mk_expr(cx, sp, ast::expr_call(fn_expr, args, false))
@ -96,7 +100,7 @@ fn mk_rec_e(cx: ext_ctxt, sp: span,
let val = field.ex;
let astfield =
{node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp};
vec::push(astfields, astfield);
astfields.push(astfield);
}
let recexpr = ast::expr_rec(astfields, option::None::<@ast::expr>);
mk_expr(cx, sp, recexpr)

View file

@ -144,7 +144,7 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
// decorated with "item decorators", then use that function to transform
// the item into a new set of items.
let new_items = do vec::flat_map(module_.items) |item| {
do vec::foldr(item.attrs, ~[item]) |attr, items| {
do vec::foldr(item.attrs, ~[*item]) |attr, items| {
let mname = match attr.node.value.node {
ast::meta_word(n) => n,
ast::meta_name_value(n, _) => n,
@ -160,7 +160,7 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
}
};
return {items: new_items,.. module_};
return {items: new_items, ..module_};
}

View file

@ -20,10 +20,10 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
let fmtspan = args[0].span;
debug!("Format string:");
log(debug, fmt);
fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: ~str) -> ! {
fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: &str) -> ! {
cx.span_fatal(sp, msg);
}
let parse_fmt_err = fn@(s: ~str) -> ! {
let parse_fmt_err = fn@(s: &str) -> ! {
parse_fmt_err_(cx, fmtspan, s)
};
let pieces = parse_fmt_string(fmt, parse_fmt_err);
@ -187,7 +187,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
TyFloat => {
return make_conv_call(cx, arg.span, ~"float", cnv, arg);
}
TyPoly => return make_conv_call(cx, arg.span, ~"poly", cnv, arg)
TyPoly => return make_conv_call(cx, arg.span, ~"poly", cnv,
mk_addr_of(cx, sp, arg))
}
}
fn log_conv(c: Conv) {
@ -245,7 +246,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
for pieces.each |pc| {
match *pc {
PieceString(s) => {
vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s))
piece_exprs.push(mk_uniq_str(cx, fmt_sp, s))
}
PieceConv(conv) => {
n += 1u;
@ -258,7 +259,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
log_conv(conv);
let arg_expr = args[n];
let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr);
vec::push(piece_exprs, c_expr);
piece_exprs.push(c_expr);
}
}
}

View file

@ -65,7 +65,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
let mut self_live = ~[];
for colive.eachi |i, bv| {
if bv.get(i) {
vec::push(self_live, proto.get_state_by_id(i))
self_live.push(proto.get_state_by_id(i))
}
}

View file

@ -47,7 +47,7 @@ impl message: gen_send {
let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str()));
let args_ast = (arg_names, tys).map(
|n, t| cx.arg_mode(n, t, ast::by_copy)
|n, t| cx.arg_mode(*n, *t, ast::by_copy)
);
let pipe_ty = cx.ty_path_ast_builder(
@ -71,10 +71,10 @@ impl message: gen_send {
body += ~"let b = pipe.reuse_buffer();\n";
body += fmt!("let %s = pipes::SendPacketBuffered(\
ptr::addr_of(b.buffer.data.%s));\n",
ptr::addr_of(&(b.buffer.data.%s)));\n",
sp, next.name);
body += fmt!("let %s = pipes::RecvPacketBuffered(\
ptr::addr_of(b.buffer.data.%s));\n",
ptr::addr_of(&(b.buffer.data.%s)));\n",
rp, next.name);
}
else {
@ -129,7 +129,7 @@ impl message: gen_send {
let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str()));
let args_ast = (arg_names, tys).map(
|n, t| cx.arg_mode(cx.ident_of(n), t, ast::by_copy)
|n, t| cx.arg_mode(cx.ident_of(*n), *t, ast::by_copy)
);
let args_ast = vec::append(
@ -226,7 +226,7 @@ impl state: to_type_decls {
let v = cx.variant(cx.ident_of(name), span, tys);
vec::push(items_msg, v);
items_msg.push(v);
}
~[cx.item_enum_poly(name,
@ -245,44 +245,44 @@ impl state: to_type_decls {
let mut items = ~[];
for self.messages.each |m| {
if dir == send {
vec::push(items, m.gen_send(cx, true));
vec::push(items, m.gen_send(cx, false));
items.push(m.gen_send(cx, true));
items.push(m.gen_send(cx, false));
}
}
if !self.proto.is_bounded() {
vec::push(items,
cx.item_ty_poly(
self.data_name(),
self.span,
cx.ty_path_ast_builder(
path(~[cx.ident_of(~"pipes"),
cx.ident_of(dir.to_str() + ~"Packet")],
empty_span())
.add_ty(cx.ty_path_ast_builder(
path(~[cx.ident_of(self.proto.name),
self.data_name()],
empty_span())
.add_tys(cx.ty_vars(self.ty_params))))),
self.ty_params));
items.push(
cx.item_ty_poly(
self.data_name(),
self.span,
cx.ty_path_ast_builder(
path(~[cx.ident_of(~"pipes"),
cx.ident_of(dir.to_str() + ~"Packet")],
empty_span())
.add_ty(cx.ty_path_ast_builder(
path(~[cx.ident_of(self.proto.name),
self.data_name()],
empty_span())
.add_tys(cx.ty_vars(self.ty_params))))),
self.ty_params));
}
else {
vec::push(items,
cx.item_ty_poly(
self.data_name(),
self.span,
cx.ty_path_ast_builder(
path(~[cx.ident_of(~"pipes"),
cx.ident_of(dir.to_str()
+ ~"PacketBuffered")],
empty_span())
.add_tys(~[cx.ty_path_ast_builder(
path(~[cx.ident_of(self.proto.name),
self.data_name()],
empty_span())
.add_tys(cx.ty_vars(self.ty_params))),
self.proto.buffer_ty_path(cx)])),
self.ty_params));
items.push(
cx.item_ty_poly(
self.data_name(),
self.span,
cx.ty_path_ast_builder(
path(~[cx.ident_of(~"pipes"),
cx.ident_of(dir.to_str()
+ ~"PacketBuffered")],
empty_span())
.add_tys(~[cx.ty_path_ast_builder(
path(~[cx.ident_of(self.proto.name),
self.data_name()],
empty_span())
.add_tys(cx.ty_vars(self.ty_params))),
self.proto.buffer_ty_path(cx)])),
self.ty_params));
};
items
}
@ -351,7 +351,7 @@ impl protocol: gen_init {
fmt!("data.%s.set_buffer_(buffer)",
s.name))),
ext_cx.parse_expr(
fmt!("ptr::addr_of(data.%s)",
fmt!("ptr::addr_of(&(data.%s))",
self.states[0].name))));
#ast {{
@ -367,7 +367,7 @@ impl protocol: gen_init {
for (copy self.states).each |s| {
for s.ty_params.each |tp| {
match params.find(|tpp| tp.ident == tpp.ident) {
None => vec::push(params, *tp),
None => params.push(*tp),
_ => ()
}
}
@ -383,7 +383,7 @@ impl protocol: gen_init {
let fields = do (copy self.states).map_to_vec |s| {
for s.ty_params.each |tp| {
match params.find(|tpp| tp.ident == tpp.ident) {
None => vec::push(params, *tp),
None => params.push(*tp),
_ => ()
}
}
@ -415,17 +415,15 @@ impl protocol: gen_init {
}
if self.is_bounded() {
vec::push(items, self.gen_buffer_type(cx))
items.push(self.gen_buffer_type(cx))
}
vec::push(items,
cx.item_mod(cx.ident_of(~"client"),
self.span,
client_states));
vec::push(items,
cx.item_mod(cx.ident_of(~"server"),
self.span,
server_states));
items.push(cx.item_mod(cx.ident_of(~"client"),
self.span,
client_states));
items.push(cx.item_mod(cx.ident_of(~"server"),
self.span,
server_states));
cx.item_mod(cx.ident_of(self.name), self.span, items)
}

View file

@ -210,10 +210,10 @@ fn visit<Tproto, Tstate, Tmessage, V: visitor<Tproto, Tstate, Tmessage>>(
// the copy keywords prevent recursive use of dvec
let states = do (copy proto.states).map_to_vec |s| {
let messages = do (copy s.messages).map_to_vec |m| {
let message(name, span, tys, this, next) = *m;
let message(name, span, tys, this, next) = m;
visitor.visit_message(name, span, tys, this, next)
};
visitor.visit_state(*s, messages)
visitor.visit_state(s, messages)
};
visitor.visit_proto(proto, states)
}

View file

@ -94,7 +94,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) ->
for v.each |elem| {
match f(*elem) {
None => return None,
Some(fv) => vec::push(res, fv)
Some(fv) => res.push(fv)
}
}
return Some(res);
@ -237,7 +237,7 @@ fn follow_for_trans(cx: ext_ctxt, mmaybe: Option<arb_depth<matchable>>,
/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
let idents: HashMap<ident, ()> = HashMap();
let idents = HashMap();
fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
idents: HashMap<ident, ()>) -> ident {
if b.contains_key(i) { idents.insert(i, ()); }
@ -305,9 +305,9 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
/* Whew, we now know how how many times to repeat */
let mut idx: uint = 0u;
while idx < rc {
vec::push(*idx_path, idx);
vec::push(res, recur(repeat_me)); // whew!
vec::pop(*idx_path);
idx_path.push(idx);
res.push(recur(repeat_me)); // whew!
idx_path.pop();
idx += 1u;
}
}
@ -567,7 +567,7 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
let mut elts = ~[];
let mut idx = offset;
while idx < vec::len(arg_elts) {
vec::push(elts, leaf(match_expr(arg_elts[idx])));
elts.push(leaf(match_expr(arg_elts[idx])));
idx += 1u;
}
@ -672,9 +672,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
None => cx.span_fatal(mac.span,
~"macro must have arguments")
};
vec::push(clauses,
@{params: pattern_to_selectors(cx, arg),
body: elts[1u]});
clauses.push(@{params: pattern_to_selectors(cx, arg),
body: elts[1u]});
// FIXME (#2251): check duplicates (or just simplify
// the macro arg situation)

View file

@ -185,7 +185,7 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match])
}
}
}
let ret_val = HashMap::<uint,@named_match>();
let ret_val = HashMap();
for ms.each() |m| { n_rec(p_s, *m, res, ret_val) }
return ret_val;
}
@ -208,7 +208,7 @@ fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
-> parse_result {
let mut cur_eis = ~[];
vec::push(cur_eis, initial_matcher_pos(ms, None, rdr.peek().sp.lo));
cur_eis.push(initial_matcher_pos(ms, None, rdr.peek().sp.lo));
loop {
let mut bb_eis = ~[]; // black-box parsed by parser.rs
@ -219,7 +219,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
/* we append new items to this while we go */
while cur_eis.len() > 0u { /* for each Earley Item */
let mut ei = vec::pop(cur_eis);
let mut ei = cur_eis.pop();
let idx = ei.idx;
let len = ei.elts.len();
@ -256,7 +256,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
}
new_pos.idx += 1;
vec::push(cur_eis, move new_pos);
cur_eis.push(move new_pos);
}
// can we go around again?
@ -267,17 +267,17 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
if tok == t { //pass the separator
let ei_t <- ei;
ei_t.idx += 1;
vec::push(next_eis, move ei_t);
next_eis.push(move ei_t);
}
}
_ => { // we don't need a separator
let ei_t <- ei;
ei_t.idx = 0;
vec::push(cur_eis, move ei_t);
cur_eis.push(move ei_t);
}
}
} else {
vec::push(eof_eis, move ei);
eof_eis.push(move ei);
}
} else {
match copy ei.elts[idx].node {
@ -292,13 +292,13 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
new_ei.matches[idx].push(@matched_seq(~[], sp));
}
vec::push(cur_eis, move new_ei);
cur_eis.push(move new_ei);
}
let matches = vec::map(ei.matches, // fresh, same size:
|_m| DVec::<@named_match>());
let ei_t <- ei;
vec::push(cur_eis, ~{
cur_eis.push(~{
elts: matchers, sep: sep, mut idx: 0u,
mut up: matcher_pos_up(Some(move ei_t)),
matches: move matches,
@ -306,12 +306,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
sp_lo: sp.lo
});
}
match_nonterminal(_,_,_) => { vec::push(bb_eis, move ei) }
match_nonterminal(_,_,_) => { bb_eis.push(move ei) }
match_tok(t) => {
let ei_t <- ei;
if t == tok {
ei_t.idx += 1;
vec::push(next_eis, move ei_t);
next_eis.push(move ei_t);
}
}
}
@ -323,7 +323,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
if eof_eis.len() == 1u {
return success(
nameize(sess, ms,
vec::map(eof_eis[0u].matches, |dv| dv.pop())));
eof_eis[0u].matches.map(|dv| dv.pop())));
} else if eof_eis.len() > 1u {
return error(sp, ~"Ambiguity: multiple successful parses");
} else {
@ -350,13 +350,13 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
} else if (next_eis.len() > 0u) {
/* Now process the next token */
while(next_eis.len() > 0u) {
vec::push(cur_eis, vec::pop(next_eis));
cur_eis.push(next_eis.pop());
}
rdr.next_token();
} else /* bb_eis.len() == 1 */ {
let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
let ei = vec::pop(bb_eis);
let ei = bb_eis.pop();
match ei.elts[ei.idx].node {
match_nonterminal(_, name, idx) => {
ei.matches[idx].push(@matched_nonterminal(
@ -365,7 +365,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
}
_ => fail
}
vec::push(cur_eis, move ei);
cur_eis.push(move ei);
/* this would fail if zero-length tokens existed */
while rdr.peek().sp.lo < rust_parser.span.lo {

View file

@ -25,7 +25,7 @@ type tt_frame = @{
type tt_reader = @{
sp_diag: span_handler,
interner: ident_interner,
interner: @ident_interner,
mut cur: tt_frame,
/* for MBE-style macro transcription */
interpolations: std::map::HashMap<ident, @named_match>,
@ -39,7 +39,7 @@ type tt_reader = @{
/** This can do Macro-By-Example transcription. On the other hand, if
* `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
* should) be none. */
fn new_tt_reader(sp_diag: span_handler, itr: ident_interner,
fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner,
interp: Option<std::map::HashMap<ident,@named_match>>,
src: ~[ast::token_tree])
-> tt_reader {
@ -47,7 +47,7 @@ fn new_tt_reader(sp_diag: span_handler, itr: ident_interner,
mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false,
sep: None, up: tt_frame_up(option::None)},
interpolations: match interp { /* just a convienience */
None => std::map::HashMap::<uint,@named_match>(),
None => std::map::HashMap(),
Some(x) => x
},
mut repeat_idx: ~[],
@ -82,13 +82,13 @@ pure fn dup_tt_reader(&&r: tt_reader) -> tt_reader {
pure fn lookup_cur_matched_by_matched(r: tt_reader,
start: @named_match) -> @named_match {
pure fn red(&&ad: @named_match, &&idx: uint) -> @named_match {
pure fn red(+ad: @named_match, idx: &uint) -> @named_match {
match *ad {
matched_nonterminal(_) => {
// end of the line; duplicate henceforth
ad
}
matched_seq(ads, _) => ads[idx]
matched_seq(ads, _) => ads[*idx]
}
}
vec::foldl(start, r.repeat_idx, red)
@ -122,8 +122,8 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis {
}
match t {
tt_delim(tts) | tt_seq(_, tts, _, _) => {
vec::foldl(lis_unconstrained, tts, {|lis, tt|
lis_merge(lis, lockstep_iter_size(tt, r), r) })
vec::foldl(lis_unconstrained, tts, |lis, tt|
lis_merge(lis, lockstep_iter_size(*tt, r), r))
}
tt_tok(*) => lis_unconstrained,
tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {
@ -148,7 +148,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
}
tt_frame_up(Some(tt_f)) => {
if r.cur.dotdotdoted {
vec::pop(r.repeat_idx); vec::pop(r.repeat_len);
r.repeat_idx.pop();
r.repeat_len.pop();
}
r.cur = tt_f;
@ -205,8 +206,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
r.cur.idx += 1u;
return tt_next_token(r);
} else {
vec::push(r.repeat_len, len);
vec::push(r.repeat_idx, 0u);
r.repeat_len.push(len);
r.repeat_idx.push(0u);
r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: true,
sep: sep, up: tt_frame_up(option::Some(r.cur))};
}

View file

@ -114,7 +114,7 @@ fn fold_mac_(m: mac, fld: ast_fold) -> mac {
match m.node {
mac_invoc(pth, arg, body) => {
mac_invoc(fld.fold_path(pth),
option::map(&arg, |x| fld.fold_expr(x)), body)
option::map(&arg, |x| fld.fold_expr(*x)), body)
}
mac_invoc_tt(*) => m.node,
mac_ellipsis => mac_ellipsis,
@ -203,7 +203,8 @@ fn noop_fold_foreign_item(&&ni: @foreign_item, fld: ast_fold)
}
},
id: fld.new_id(ni.id),
span: fld.new_span(ni.span)};
span: fld.new_span(ni.span),
vis: ni.vis};
}
fn noop_fold_item(&&i: @item, fld: ast_fold) -> Option<@item> {
@ -243,7 +244,7 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
variants: vec::map(enum_definition.variants,
|x| fld.fold_variant(*x)),
common: option::map(&enum_definition.common,
|x| fold_struct_def(x, fld))
|x| fold_struct_def(*x, fld))
}), fold_ty_params(typms, fld))
}
item_class(struct_def, typms) => {
@ -252,7 +253,7 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
}
item_impl(tps, ifce, ty, methods) => {
item_impl(fold_ty_params(tps, fld),
ifce.map(|p| fold_trait_ref(p, fld)),
ifce.map(|p| fold_trait_ref(*p, fld)),
fld.fold_ty(ty),
vec::map(methods, |x| fld.fold_method(*x)))
}
@ -292,7 +293,7 @@ fn fold_struct_def(struct_def: @ast::struct_def, fld: ast_fold)
let dtor_id = fld.new_id(dtor.node.id);
{node: {body: dtor_body,
id: dtor_id,.. dtor.node},
.. dtor}};
.. *dtor}};
return @{
traits: vec::map(struct_def.traits, |p| fold_trait_ref(*p, fld)),
fields: vec::map(struct_def.fields, |f| fold_struct_field(*f, fld)),
@ -332,7 +333,7 @@ fn noop_fold_method(&&m: @method, fld: ast_fold) -> @method {
fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ {
return {view_items: vec::map(b.view_items, |x| fld.fold_view_item(*x)),
stmts: vec::map(b.stmts, |x| fld.fold_stmt(*x)),
expr: option::map(&b.expr, |x| fld.fold_expr(x)),
expr: option::map(&b.expr, |x| fld.fold_expr(*x)),
id: fld.new_id(b.id),
rules: b.rules};
}
@ -347,7 +348,7 @@ fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ {
fn noop_fold_arm(a: arm, fld: ast_fold) -> arm {
return {pats: vec::map(a.pats, |x| fld.fold_pat(*x)),
guard: option::map(&a.guard, |x| fld.fold_expr(x)),
guard: option::map(&a.guard, |x| fld.fold_expr(*x)),
body: fld.fold_block(a.body)};
}
@ -357,19 +358,18 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
pat_ident(binding_mode, pth, sub) => {
pat_ident(binding_mode,
fld.fold_path(pth),
option::map(&sub, |x| fld.fold_pat(x)))
option::map(&sub, |x| fld.fold_pat(*x)))
}
pat_lit(e) => pat_lit(fld.fold_expr(e)),
pat_enum(pth, pats) => {
pat_enum(fld.fold_path(pth), option::map(&pats,
|pats| vec::map(pats, |x| fld.fold_pat(*x))))
|pats| vec::map(*pats, |x| fld.fold_pat(*x))))
}
pat_rec(fields, etc) => {
let mut fs = ~[];
for fields.each |f| {
vec::push(fs,
{ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
fs.push({ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
}
pat_rec(fs, etc)
}
@ -377,9 +377,8 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
let pth_ = fld.fold_path(pth);
let mut fs = ~[];
for fields.each |f| {
vec::push(fs,
{ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
fs.push({ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
}
pat_struct(pth_, fs, etc)
}
@ -434,7 +433,7 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt),
expr_rec(fields, maybe_expr) => {
expr_rec(vec::map(fields, |x| fold_field(*x)),
option::map(&maybe_expr, |x| fld.fold_expr(x)))
option::map(&maybe_expr, |x| fld.fold_expr(*x)))
}
expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(*x))),
expr_call(f, args, blk) => {
@ -453,14 +452,14 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)),
expr_if(cond, tr, fl) => {
expr_if(fld.fold_expr(cond), fld.fold_block(tr),
option::map(&fl, |x| fld.fold_expr(x)))
option::map(&fl, |x| fld.fold_expr(*x)))
}
expr_while(cond, body) => {
expr_while(fld.fold_expr(cond), fld.fold_block(body))
}
expr_loop(body, opt_ident) => {
expr_loop(fld.fold_block(body),
option::map(&opt_ident, |x| fld.fold_ident(x)))
option::map(&opt_ident, |x| fld.fold_ident(*x)))
}
expr_match(expr, arms) => {
expr_match(fld.fold_expr(expr),
@ -502,12 +501,12 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
expr_index(fld.fold_expr(el), fld.fold_expr(er))
}
expr_path(pth) => expr_path(fld.fold_path(pth)),
expr_fail(e) => expr_fail(option::map(&e, |x| fld.fold_expr(x))),
expr_fail(e) => expr_fail(option::map(&e, |x| fld.fold_expr(*x))),
expr_break(opt_ident) =>
expr_break(option::map(&opt_ident, |x| fld.fold_ident(x))),
expr_break(option::map(&opt_ident, |x| fld.fold_ident(*x))),
expr_again(opt_ident) =>
expr_again(option::map(&opt_ident, |x| fld.fold_ident(x))),
expr_ret(e) => expr_ret(option::map(&e, |x| fld.fold_expr(x))),
expr_again(option::map(&opt_ident, |x| fld.fold_ident(*x))),
expr_ret(e) => expr_ret(option::map(&e, |x| fld.fold_expr(*x))),
expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv),
fld.fold_expr(e)),
expr_assert(e) => expr_assert(fld.fold_expr(e)),
@ -515,7 +514,7 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
expr_struct(path, fields, maybe_expr) => {
expr_struct(fld.fold_path(path),
vec::map(fields, |x| fold_field(*x)),
option::map(&maybe_expr, |x| fld.fold_expr(x)))
option::map(&maybe_expr, |x| fld.fold_expr(*x)))
}
}
}
@ -553,7 +552,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ {
// ...nor do modules
fn noop_fold_mod(m: _mod, fld: ast_fold) -> _mod {
return {view_items: vec::map(m.view_items, |x| fld.fold_view_item(*x)),
items: vec::filter_map(m.items, |x| fld.fold_item(x))};
items: vec::filter_map(m.items, |x| fld.fold_item(*x))};
}
fn noop_fold_foreign_mod(nm: foreign_mod, fld: ast_fold) -> foreign_mod {
@ -579,7 +578,7 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
let dtor_id = fld.new_id(dtor.node.id);
{node: {body: dtor_body,
id: dtor_id,.. dtor.node},
.. dtor}};
.. *dtor}};
kind = struct_variant_kind(@{
traits: ~[],
fields: vec::map(struct_def.fields,
@ -595,7 +594,7 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
let variants = vec::map(enum_definition.variants,
|x| fld.fold_variant(*x));
let common = option::map(&enum_definition.common,
|x| fold_struct_def(x, fld));
|x| fold_struct_def(*x, fld));
kind = enum_variant_kind(ast::enum_def({ variants: variants,
common: common }));
}

View file

@ -25,7 +25,7 @@ type parse_sess = @{
cm: codemap::codemap,
mut next_id: node_id,
span_diagnostic: span_handler,
interner: ident_interner,
interner: @ident_interner,
// these two must be kept up to date
mut chpos: uint,
mut byte_pos: uint
@ -73,7 +73,7 @@ fn parse_crate_from_crate_file(input: &Path, cfg: ast::crate_cfg,
sess.chpos = rdr.chpos;
sess.byte_pos = sess.byte_pos + rdr.pos;
let cx = @{sess: sess, cfg: /* FIXME (#2543) */ copy p.cfg};
let companionmod = input.filestem().map(|s| Path(s));
let companionmod = input.filestem().map(|s| Path(*s));
let (m, attrs) = eval::eval_crate_directives_to_mod(
cx, cdirs, &prefix, &companionmod);
let mut hi = p.span.hi;

View file

@ -1,4 +1,5 @@
use io::println;//XXXXXXXXxxx
use io::ReaderUtil;
use util::interner;
use lexer::{string_reader, bump, is_eof, nextch,
is_whitespace, get_str_from, reader};
@ -129,7 +130,7 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
fn push_blank_line_comment(rdr: string_reader, &comments: ~[cmnt]) {
debug!(">>> blank-line comment");
let v: ~[~str] = ~[];
vec::push(comments, {style: blank_line, lines: v, pos: rdr.chpos});
comments.push({style: blank_line, lines: v, pos: rdr.chpos});
}
fn consume_whitespace_counting_blank_lines(rdr: string_reader,
@ -148,7 +149,7 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
debug!(">>> shebang comment");
let p = rdr.chpos;
debug!("<<< shebang comment");
vec::push(comments, {
comments.push({
style: if code_to_the_left { trailing } else { isolated },
lines: ~[read_one_line_comment(rdr)],
pos: p
@ -166,12 +167,12 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
if is_doc_comment(line) { // doc-comments are not put in comments
break;
}
vec::push(lines, line);
lines.push(line);
consume_non_eol_whitespace(rdr);
}
debug!("<<< line comments");
if !lines.is_empty() {
vec::push(comments, {
comments.push({
style: if code_to_the_left { trailing } else { isolated },
lines: lines,
pos: p
@ -197,7 +198,7 @@ fn trim_whitespace_prefix_and_push_line(&lines: ~[~str],
} else { s1 = ~""; }
} else { s1 = s; }
log(debug, ~"pushing line: " + s1);
vec::push(lines, s1);
lines.push(s1);
}
fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
@ -256,7 +257,7 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
style = mixed;
}
debug!("<<< block comment");
vec::push(comments, {style: style, lines: lines, pos: p});
comments.push({style: style, lines: lines, pos: p});
}
fn peeking_at_comment(rdr: string_reader) -> bool {
@ -314,7 +315,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
let {tok: tok, sp: sp} = rdr.peek();
if token::is_lit(tok) {
let s = get_str_from(rdr, bstart);
vec::push(literals, {lit: s, pos: sp.lo});
literals.push({lit: s, pos: sp.lo});
log(debug, ~"tok lit: " + s);
} else {
log(debug, ~"tok: " + token::to_str(rdr.interner, tok));

View file

@ -229,7 +229,7 @@ impl parser: parser_common {
}
_ => ()
}
vec::push(v, f(self));
v.push(f(self));
}
return v;
@ -274,7 +274,7 @@ impl parser: parser_common {
_ => ()
}
if sep.trailing_sep_allowed && self.token == ket { break; }
vec::push(v, f(self));
v.push(f(self));
}
return v;
}

View file

@ -107,7 +107,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &Path,
// Thread defids, chpos and byte_pos through the parsers
cx.sess.chpos = r0.chpos;
cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
vec::push(items, i);
items.push(i);
}
ast::cdir_dir_mod(vis, id, cdirs, attrs) => {
let path = Path(cdir_path_opt(*cx.sess.interner.get(id), attrs));
@ -126,9 +126,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &Path,
vis: vis,
span: cdir.span};
cx.sess.next_id += 1;
vec::push(items, i);
items.push(i);
}
ast::cdir_view_item(vi) => vec::push(view_items, vi),
ast::cdir_view_item(vi) => view_items.push(vi),
ast::cdir_syntax(*) => ()
}
}

View file

@ -13,7 +13,7 @@ trait reader {
fn next_token() -> {tok: token::token, sp: span};
fn fatal(~str) -> !;
fn span_diag() -> span_handler;
pure fn interner() -> token::ident_interner;
pure fn interner() -> @token::ident_interner;
fn peek() -> {tok: token::token, sp: span};
fn dup() -> reader;
}
@ -26,7 +26,7 @@ type string_reader = @{
mut curr: char,
mut chpos: uint,
filemap: codemap::filemap,
interner: token::ident_interner,
interner: @token::ident_interner,
/* cached: */
mut peek_tok: token::token,
mut peek_span: span
@ -34,7 +34,7 @@ type string_reader = @{
fn new_string_reader(span_diagnostic: span_handler,
filemap: codemap::filemap,
itr: token::ident_interner) -> string_reader {
itr: @token::ident_interner) -> string_reader {
let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
string_advance_token(r); /* fill in peek_* */
return r;
@ -43,7 +43,7 @@ fn new_string_reader(span_diagnostic: span_handler,
/* For comments.rs, which hackily pokes into 'pos' and 'curr' */
fn new_low_level_string_reader(span_diagnostic: span_handler,
filemap: codemap::filemap,
itr: token::ident_interner)
itr: @token::ident_interner)
-> string_reader {
let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
mut col: 0u, mut pos: 0u, mut curr: -1 as char,
@ -78,7 +78,7 @@ impl string_reader: reader {
self.span_diagnostic.span_fatal(copy self.peek_span, m)
}
fn span_diag() -> span_handler { self.span_diagnostic }
pure fn interner() -> token::ident_interner { self.interner }
pure fn interner() -> @token::ident_interner { self.interner }
fn peek() -> {tok: token::token, sp: span} {
{tok: self.peek_tok, sp: self.peek_span}
}
@ -100,7 +100,7 @@ impl tt_reader: reader {
self.sp_diag.span_fatal(copy self.cur_span, m);
}
fn span_diag() -> span_handler { self.sp_diag }
pure fn interner() -> token::ident_interner { self.interner }
pure fn interner() -> @token::ident_interner { self.interner }
fn peek() -> {tok: token::token, sp: span} {
{ tok: self.cur_tok, sp: self.cur_span }
}

View file

@ -36,7 +36,7 @@ impl ObsoleteSyntax : cmp::Eq {
impl ObsoleteSyntax: to_bytes::IterBytes {
#[inline(always)]
pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) {
pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
(self as uint).iter_bytes(lsb0, f);
}
}

View file

@ -237,7 +237,7 @@ struct parser {
mut restriction: restriction,
mut quote_depth: uint, // not (yet) related to the quasiquoter
reader: reader,
interner: interner<@~str>,
interner: @token::ident_interner,
keywords: HashMap<~str, ()>,
strict_keywords: HashMap<~str, ()>,
reserved_keywords: HashMap<~str, ()>,
@ -496,7 +496,7 @@ impl parser {
let mut ts = ~[self.parse_ty(false)];
while self.token == token::COMMA {
self.bump();
vec::push(ts, self.parse_ty(false));
ts.push(self.parse_ty(false));
}
let t = if vec::len(ts) == 1u { ts[0].node }
else { ty_tup(ts) };
@ -584,6 +584,29 @@ impl parser {
} else { infer(self.get_id()) }
}
// Look-ahead test: are we positioned at a *named* argument, i.e.
// (optionally an argument-mode sigil, then) a plain identifier
// followed by `:`? Does not consume any tokens.
fn is_named_argument() -> bool {
// Width, in tokens, of a leading argument-mode sigil:
// `&`, `-`, and `&&` occupy one token; `+` may be `++` (two tokens),
// so peek one further to decide between 1 and 2.
let offset = if self.token == token::BINOP(token::AND) {
1
} else if self.token == token::BINOP(token::MINUS) {
1
} else if self.token == token::ANDAND {
1
} else if self.token == token::BINOP(token::PLUS) {
if self.look_ahead(1) == token::BINOP(token::PLUS) {
2
} else {
1
}
} else { 0 };
if offset == 0 {
// No mode sigil: the name, if any, is the current token.
is_plain_ident(self.token)
&& self.look_ahead(1) == token::COLON
} else {
// Skip over the sigil, then check for `ident :`.
is_plain_ident(self.look_ahead(offset))
&& self.look_ahead(offset + 1) == token::COLON
}
}
fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item)
-> arg_or_capture_item {
@ -605,29 +628,17 @@ impl parser {
// This version of parse arg doesn't necessarily require
// identifier names.
fn parse_arg_general(require_name: bool) -> arg {
let m = self.parse_arg_mode();
let i = if require_name {
let mut m;
let i = if require_name || self.is_named_argument() {
m = self.parse_arg_mode();
let name = self.parse_value_ident();
self.expect(token::COLON);
name
} else {
if is_plain_ident(self.token)
&& self.look_ahead(1u) == token::COLON {
let name = self.parse_value_ident();
self.bump();
name
} else { special_idents::invalid }
m = infer(self.get_id());
special_idents::invalid
};
match m {
expl(_) => {
if i == special_idents::invalid {
self.obsolete(copy self.span, ObsoleteModeInFnType);
}
}
_ => {}
}
let t = self.parse_ty(false);
{mode: m, ty: t, ident: i, id: self.get_id()}
@ -760,10 +771,10 @@ impl parser {
&& self.look_ahead(1u) == token::MOD_SEP;
if is_not_last {
vec::push(ids, parse_ident(self));
ids.push(parse_ident(self));
self.expect(token::MOD_SEP);
} else {
vec::push(ids, parse_last_ident(self));
ids.push(parse_last_ident(self));
break;
}
}
@ -892,7 +903,7 @@ impl parser {
}
let mut es = ~[self.parse_expr()];
while self.token == token::COMMA {
self.bump(); vec::push(es, self.parse_expr());
self.bump(); es.push(self.parse_expr());
}
hi = self.span.hi;
self.expect(token::RPAREN);
@ -1038,7 +1049,7 @@ impl parser {
self.bump();
let mut fields = ~[];
let mut base = None;
vec::push(fields, self.parse_field(token::COLON));
fields.push(self.parse_field(token::COLON));
while self.token != token::RBRACE {
if self.try_parse_obsolete_with() {
@ -1056,7 +1067,7 @@ impl parser {
// Accept an optional trailing comma.
break;
}
vec::push(fields, self.parse_field(token::COLON));
fields.push(self.parse_field(token::COLON));
}
hi = pth.span.hi;
@ -1305,7 +1316,7 @@ impl parser {
while self.token != ket || lparens > 0u {
if self.token == token::LPAREN { lparens += 1u; }
if self.token == token::RPAREN { lparens -= 1u; }
vec::push(ret_val, self.parse_matcher(name_idx));
ret_val.push(self.parse_matcher(name_idx));
}
self.bump();
@ -1711,7 +1722,7 @@ impl parser {
// record ends by an optional trailing comma
break;
}
vec::push(fields, self.parse_field(token::COLON));
fields.push(self.parse_field(token::COLON));
}
self.expect(token::RBRACE);
return expr_rec(fields, base);
@ -1746,7 +1757,7 @@ impl parser {
rules: default_blk},
span: expr.span};
vec::push(arms, {pats: pats, guard: guard, body: blk});
arms.push({pats: pats, guard: guard, body: blk});
}
let mut hi = self.span.hi;
self.bump();
@ -1791,7 +1802,7 @@ impl parser {
fn parse_pats() -> ~[@pat] {
let mut pats = ~[];
loop {
vec::push(pats, self.parse_pat(true));
pats.push(self.parse_pat(true));
if self.token == token::BINOP(token::OR) { self.bump(); }
else { return pats; }
};
@ -1838,7 +1849,7 @@ impl parser {
span: self.last_span
};
}
vec::push(fields, {ident: fieldname, pat: subpat});
fields.push({ident: fieldname, pat: subpat});
}
return (fields, etc);
}
@ -1926,7 +1937,7 @@ impl parser {
let mut fields = ~[self.parse_pat(refutable)];
while self.token == token::COMMA {
self.bump();
vec::push(fields, self.parse_pat(refutable));
fields.push(self.parse_pat(refutable));
}
if vec::len(fields) == 1u { self.expect(token::COMMA); }
hi = self.span.hi;
@ -2115,7 +2126,7 @@ impl parser {
let lo = self.span.lo;
let mut locals = ~[self.parse_local(is_mutbl, true)];
while self.eat(token::COMMA) {
vec::push(locals, self.parse_local(is_mutbl, true));
locals.push(self.parse_local(is_mutbl, true));
}
return @spanned(lo, self.last_span.hi, decl_local(locals));
}
@ -2216,17 +2227,12 @@ impl parser {
}
let lo = self.span.lo;
if self.eat_keyword(~"unsafe") {
self.expect(token::LBRACE);
let {inner, next} = maybe_parse_inner_attrs_and_next(self,
parse_attrs);
return (inner, self.parse_block_tail_(lo, unsafe_blk, next));
} else {
self.expect(token::LBRACE);
let {inner, next} = maybe_parse_inner_attrs_and_next(self,
parse_attrs);
return (inner, self.parse_block_tail_(lo, default_blk, next));
}
let us = self.eat_keyword(~"unsafe");
self.expect(token::LBRACE);
let {inner, next} = maybe_parse_inner_attrs_and_next(self,
parse_attrs);
let blk_check_mode = if us { unsafe_blk } else { default_blk };
return (inner, self.parse_block_tail_(lo, blk_check_mode, next));
}
fn parse_block_no_value() -> blk {
@ -2255,8 +2261,8 @@ impl parser {
for items.each |item| {
let decl = @spanned(item.span.lo, item.span.hi, decl_item(*item));
push(stmts, @spanned(item.span.lo, item.span.hi,
stmt_decl(decl, self.get_id())));
stmts.push(@spanned(item.span.lo, item.span.hi,
stmt_decl(decl, self.get_id())));
}
let mut initial_attrs = attrs_remaining;
@ -2267,43 +2273,46 @@ impl parser {
while self.token != token::RBRACE {
match self.token {
token::SEMI => {
self.bump(); // empty
}
_ => {
let stmt = self.parse_stmt(initial_attrs);
initial_attrs = ~[];
match stmt.node {
stmt_expr(e, stmt_id) => { // Expression without semicolon:
match self.token {
token::SEMI => {
self.bump();
push(stmts,
@{node: stmt_semi(e, stmt_id),.. *stmt});
}
token::RBRACE => {
expr = Some(e);
}
t => {
if classify::stmt_ends_with_semi(*stmt) {
self.fatal(~"expected `;` or `}` after \
expression but found `"
+ token_to_str(self.reader, t) + ~"`");
}
vec::push(stmts, stmt);
}
}
}
_ => { // All other kinds of statements:
vec::push(stmts, stmt);
if classify::stmt_ends_with_semi(*stmt) {
self.expect(token::SEMI);
}
}
token::SEMI => {
self.bump(); // empty
}
_ => {
let stmt = self.parse_stmt(initial_attrs);
initial_attrs = ~[];
match stmt.node {
stmt_expr(e, stmt_id) => {
// Expression without semicolon
match self.token {
token::SEMI => {
self.bump();
stmts.push(@{node: stmt_semi(e, stmt_id),
..*stmt});
}
token::RBRACE => {
expr = Some(e);
}
t => {
if classify::stmt_ends_with_semi(*stmt) {
self.fatal(
~"expected `;` or `}` after \
expression but found `"
+ token_to_str(self.reader, t)
+ ~"`");
}
stmts.push(stmt);
}
}
}
_ => { // All other kinds of statements:
stmts.push(stmt);
if classify::stmt_ends_with_semi(*stmt) {
self.expect(token::SEMI);
}
}
}
}
}
}
}
let mut hi = self.span.hi;
@ -2345,16 +2354,16 @@ impl parser {
};
match maybe_bound {
Some(bound) => {
self.bump();
push(bounds, bound);
}
None => {
push(bounds, bound_trait(self.parse_ty(false)));
}
Some(bound) => {
self.bump();
bounds.push(bound);
}
None => {
bounds.push(bound_trait(self.parse_ty(false)));
}
}
} else {
push(bounds, bound_trait(self.parse_ty(false)));
bounds.push(bound_trait(self.parse_ty(false)));
}
}
}
@ -2625,7 +2634,7 @@ impl parser {
self.expect(token::LBRACE);
while !self.eat(token::RBRACE) {
let vis = self.parse_visibility();
vec::push(meths, self.parse_method(vis));
meths.push(self.parse_method(vis));
}
(ident, item_impl(tps, opt_trait, ty, meths), None)
}
@ -2711,9 +2720,9 @@ impl parser {
for mms.each |mm| {
match *mm {
@field_member(struct_field) =>
vec::push(fields, struct_field),
fields.push(struct_field),
@method_member(the_method_member) =>
vec::push(methods, the_method_member)
methods.push(the_method_member)
}
}
}
@ -2744,7 +2753,7 @@ impl parser {
}
let actual_dtor = do the_dtor.map |dtor| {
let (d_body, d_attrs, d_s) = dtor;
let (d_body, d_attrs, d_s) = *dtor;
{node: {id: self.get_id(),
attrs: d_attrs,
self_id: self.get_id(),
@ -2885,7 +2894,7 @@ impl parser {
debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)",
attrs);
match self.parse_item_or_view_item(attrs, true) {
iovi_item(item) => vec::push(items, item),
iovi_item(item) => items.push(item),
iovi_view_item(view_item) => {
self.span_fatal(view_item.span, ~"view items must be \
declared at the top of the \
@ -2926,7 +2935,8 @@ impl parser {
(id, item_mod(m), Some(inner_attrs.inner))
}
fn parse_item_foreign_fn(+attrs: ~[attribute]) -> @foreign_item {
fn parse_item_foreign_fn(vis: ast::visibility,
+attrs: ~[attribute]) -> @foreign_item {
let lo = self.span.lo;
let purity = self.parse_fn_purity();
let t = self.parse_fn_header();
@ -2937,10 +2947,12 @@ impl parser {
attrs: attrs,
node: foreign_item_fn(decl, purity, t.tps),
id: self.get_id(),
span: mk_sp(lo, hi)};
span: mk_sp(lo, hi),
vis: vis};
}
fn parse_item_foreign_const(+attrs: ~[attribute]) -> @foreign_item {
fn parse_item_foreign_const(vis: ast::visibility,
+attrs: ~[attribute]) -> @foreign_item {
let lo = self.span.lo;
self.expect_keyword(~"const");
let ident = self.parse_ident();
@ -2952,7 +2964,8 @@ impl parser {
attrs: attrs,
node: foreign_item_const(move ty),
id: self.get_id(),
span: mk_sp(lo, hi)};
span: mk_sp(lo, hi),
vis: vis};
}
fn parse_fn_purity() -> purity {
@ -2968,10 +2981,11 @@ impl parser {
}
fn parse_foreign_item(+attrs: ~[attribute]) -> @foreign_item {
let vis = self.parse_visibility();
if self.is_keyword(~"const") {
self.parse_item_foreign_const(move attrs)
self.parse_item_foreign_const(vis, move attrs)
} else {
self.parse_item_foreign_fn(move attrs)
self.parse_item_foreign_fn(vis, move attrs)
}
}
@ -2989,7 +3003,7 @@ impl parser {
let attrs = vec::append(initial_attrs,
self.parse_outer_attributes());
initial_attrs = ~[];
vec::push(items, self.parse_foreign_item(attrs));
items.push(self.parse_foreign_item(attrs));
}
return {sort: sort, view_items: view_items,
items: items};
@ -3102,9 +3116,9 @@ impl parser {
for mms.each |mm| {
match *mm {
@field_member(struct_field) =>
vec::push(fields, struct_field),
fields.push(struct_field),
@method_member(the_method_member) =>
vec::push(methods, the_method_member)
methods.push(the_method_member)
}
}
}
@ -3112,7 +3126,7 @@ impl parser {
}
self.bump();
let mut actual_dtor = do the_dtor.map |dtor| {
let (d_body, d_attrs, d_s) = dtor;
let (d_body, d_attrs, d_s) = *dtor;
{node: {id: self.get_id(),
attrs: d_attrs,
self_id: self.get_id(),
@ -3173,7 +3187,7 @@ impl parser {
seq_sep_trailing_disallowed(token::COMMA),
|p| p.parse_ty(false));
for arg_tys.each |ty| {
vec::push(args, {ty: *ty, id: self.get_id()});
args.push({ty: *ty, id: self.get_id()});
}
kind = tuple_variant_kind(args);
} else if self.eat(token::EQ) {
@ -3189,7 +3203,7 @@ impl parser {
let vr = {name: ident, attrs: variant_attrs,
kind: kind, id: self.get_id(),
disr_expr: disr_expr, vis: vis};
vec::push(variants, spanned(vlo, self.last_span.hi, vr));
variants.push(spanned(vlo, self.last_span.hi, vr));
if needs_comma && !self.eat(token::COMMA) { break; }
}
@ -3416,7 +3430,7 @@ impl parser {
while self.token == token::MOD_SEP {
self.bump();
let id = self.parse_ident();
vec::push(path, id);
path.push(id);
}
let path = @{span: mk_sp(lo, self.span.hi), global: false,
idents: path, rp: None, types: ~[]};
@ -3434,7 +3448,7 @@ impl parser {
token::IDENT(i, _) => {
self.bump();
vec::push(path, i);
path.push(i);
}
// foo::bar::{a,b,c}
@ -3477,7 +3491,7 @@ impl parser {
let mut vp = ~[self.parse_view_path()];
while self.token == token::COMMA {
self.bump();
vec::push(vp, self.parse_view_path());
vp.push(self.parse_view_path());
}
return vp;
}
@ -3497,8 +3511,8 @@ impl parser {
self.token_is_keyword(~"mod", next_tok))
}
fn parse_view_item(+attrs: ~[attribute]) -> @view_item {
let lo = self.span.lo, vis = self.parse_visibility();
fn parse_view_item(+attrs: ~[attribute], vis: visibility) -> @view_item {
let lo = self.span.lo;
let node = if self.eat_keyword(~"use") {
self.parse_use()
} else if self.eat_keyword(~"export") {
@ -3630,7 +3644,7 @@ impl parser {
_ => self.unexpected()
}
} else if self.is_view_item() {
let vi = self.parse_view_item(outer_attrs);
let vi = self.parse_view_item(outer_attrs, vis);
return spanned(lo, vi.span.hi, cdir_view_item(vi));
}
return self.fatal(~"expected crate directive");
@ -3651,7 +3665,7 @@ impl parser {
let mut first_outer_attr = first_outer_attr;
while self.token != term {
let cdir = @self.parse_crate_directive(first_outer_attr);
vec::push(cdirs, cdir);
cdirs.push(cdir);
first_outer_attr = ~[];
}
return cdirs;

View file

@ -12,9 +12,6 @@ use std::serialization::{Serializer,
serialize_bool,
deserialize_bool};
#[auto_serialize]
type str_num = uint;
#[auto_serialize]
enum binop {
PLUS,
@ -72,17 +69,17 @@ enum token {
LIT_INT(i64, ast::int_ty),
LIT_UINT(u64, ast::uint_ty),
LIT_INT_UNSUFFIXED(i64),
LIT_FLOAT(str_num, ast::float_ty),
LIT_STR(str_num),
LIT_FLOAT(ast::ident, ast::float_ty),
LIT_STR(ast::ident),
/* Name components */
IDENT(str_num, bool),
IDENT(ast::ident, bool),
UNDERSCORE,
/* For interpolation */
INTERPOLATED(nonterminal),
DOC_COMMENT(str_num),
DOC_COMMENT(ast::ident),
EOF,
}
@ -95,7 +92,7 @@ enum nonterminal {
nt_pat( @ast::pat),
nt_expr(@ast::expr),
nt_ty( @ast::ty),
nt_ident(str_num, bool),
nt_ident(ast::ident, bool),
nt_path(@ast::path),
nt_tt( @ast::token_tree), //needs @ed to break a circularity
nt_matchers(~[ast::matcher])
@ -116,7 +113,7 @@ fn binop_to_str(o: binop) -> ~str {
}
}
fn to_str(in: interner<@~str>, t: token) -> ~str {
fn to_str(in: @ident_interner, t: token) -> ~str {
match t {
EQ => ~"=",
LT => ~"<",
@ -174,7 +171,7 @@ fn to_str(in: interner<@~str>, t: token) -> ~str {
}
body + ast_util::float_ty_to_str(t)
}
LIT_STR(s) => { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + ~"\"" }
/* Name components */
IDENT(s, _) => *in.get(s),
@ -281,49 +278,66 @@ pure fn is_bar(t: token) -> bool {
mod special_idents {
#[legacy_exports];
use ast::ident;
const underscore : ident = 0u;
const anon : ident = 1u;
const dtor : ident = 2u; // 'drop', but that's reserved
const invalid : ident = 3u; // ''
const unary : ident = 4u;
const not_fn : ident = 5u;
const idx_fn : ident = 6u;
const unary_minus_fn : ident = 7u;
const clownshoes_extensions : ident = 8u;
const underscore : ident = ident { repr: 0u };
const anon : ident = ident { repr: 1u };
const dtor : ident = ident { repr: 2u }; // 'drop', but that's reserved
const invalid : ident = ident { repr: 3u }; // ''
const unary : ident = ident { repr: 4u };
const not_fn : ident = ident { repr: 5u };
const idx_fn : ident = ident { repr: 6u };
const unary_minus_fn : ident = ident { repr: 7u };
const clownshoes_extensions : ident = ident { repr: 8u };
const self_ : ident = 9u; // 'self'
const self_ : ident = ident { repr: 9u }; // 'self'
/* for matcher NTs */
const item : ident = 10u;
const block : ident = 11u;
const stmt : ident = 12u;
const pat : ident = 13u;
const expr : ident = 14u;
const ty : ident = 15u;
const ident : ident = 16u;
const path : ident = 17u;
const tt : ident = 18u;
const matchers : ident = 19u;
const item : ident = ident { repr: 10u };
const block : ident = ident { repr: 11u };
const stmt : ident = ident { repr: 12u };
const pat : ident = ident { repr: 13u };
const expr : ident = ident { repr: 14u };
const ty : ident = ident { repr: 15u };
const ident : ident = ident { repr: 16u };
const path : ident = ident { repr: 17u };
const tt : ident = ident { repr: 18u };
const matchers : ident = ident { repr: 19u };
const str : ident = 20u; // for the type
const str : ident = ident { repr: 20u }; // for the type
/* outside of libsyntax */
const ty_visitor : ident = 21u;
const arg : ident = 22u;
const descrim : ident = 23u;
const clownshoe_abi : ident = 24u;
const clownshoe_stack_shim : ident = 25u;
const tydesc : ident = 26u;
const literally_dtor : ident = 27u;
const main : ident = 28u;
const opaque : ident = 29u;
const blk : ident = 30u;
const static : ident = 31u;
const intrinsic : ident = 32u;
const clownshoes_foreign_mod: ident = 33;
const ty_visitor : ident = ident { repr: 21u };
const arg : ident = ident { repr: 22u };
const descrim : ident = ident { repr: 23u };
const clownshoe_abi : ident = ident { repr: 24u };
const clownshoe_stack_shim : ident = ident { repr: 25u };
const tydesc : ident = ident { repr: 26u };
const literally_dtor : ident = ident { repr: 27u };
const main : ident = ident { repr: 28u };
const opaque : ident = ident { repr: 29u };
const blk : ident = ident { repr: 30u };
const static : ident = ident { repr: 31u };
const intrinsic : ident = ident { repr: 32u };
const clownshoes_foreign_mod: ident = ident { repr: 33 };
}
type ident_interner = util::interner::interner<@~str>;
// Wrapper around the generic string interner that hands out `ast::ident`
// newtype values (the `struct ident { repr: uint }` introduced earlier in
// this commit) instead of exposing raw `uint` indices directly.
struct ident_interner {
    priv interner: util::interner::interner<@~str>,
}
impl ident_interner {
    // Intern `val`, wrapping the index returned by the underlying
    // interner in the `ident` newtype.
    fn intern(val: @~str) -> ast::ident {
        ast::ident { repr: self.interner.intern(val) }
    }
    // Like `intern`, but delegates to the underlying interner's `gensym`,
    // which presumably always mints a fresh entry — confirm against
    // util::interner.
    fn gensym(val: @~str) -> ast::ident {
        ast::ident { repr: self.interner.gensym(val) }
    }
    // Look up the interned string for `idx` by unwrapping its `repr` index.
    pure fn get(idx: ast::ident) -> @~str {
        self.interner.get(idx.repr)
    }
    // Number of entries currently held by the underlying interner.
    fn len() -> uint {
        self.interner.len()
    }
}
/** Key for thread-local data for sneaking interner information to the
* serializer/deserializer. It sounds like a hack because it is one.
@ -335,7 +349,7 @@ macro_rules! interner_key (
(-3 as uint, 0u)))
)
fn mk_ident_interner() -> ident_interner {
fn mk_ident_interner() -> @ident_interner {
/* the indices here must correspond to the numbers in special_idents */
let init_vec = ~[@~"_", @~"anon", @~"drop", @~"", @~"unary", @~"!",
@~"[]", @~"unary-", @~"__extensions__", @~"self",
@ -346,7 +360,9 @@ fn mk_ident_interner() -> ident_interner {
@~"dtor", @~"main", @~"<opaque>", @~"blk", @~"static",
@~"intrinsic", @~"__foreign_mod__"];
let rv = interner::mk_prefill::<@~str>(init_vec);
let rv = @ident_interner {
interner: interner::mk_prefill::<@~str>(init_vec)
};
/* having multiple interners will just confuse the serializer */
unsafe {
@ -360,8 +376,8 @@ fn mk_ident_interner() -> ident_interner {
/* for when we don't care about the contents; doesn't interact with TLD or
serialization */
fn mk_fake_ident_interner() -> ident_interner {
interner::mk::<@~str>()
fn mk_fake_ident_interner() -> @ident_interner {
@ident_interner { interner: interner::mk::<@~str>() }
}
/**

View file

@ -25,7 +25,7 @@ fn no_ann() -> pp_ann {
type ps =
@{s: pp::printer,
cm: Option<codemap>,
intr: token::ident_interner,
intr: @token::ident_interner,
comments: Option<~[comments::cmnt]>,
literals: Option<~[comments::lit]>,
mut cur_cmnt: uint,
@ -43,7 +43,7 @@ fn end(s: ps) {
pp::end(s.s);
}
fn rust_printer(writer: io::Writer, intr: ident_interner) -> ps {
fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps {
return @{s: pp::mk_printer(writer, default_columns),
cm: None::<codemap>,
intr: intr,
@ -63,7 +63,7 @@ const default_columns: uint = 78u;
// Requires you to pass an input filename and reader so that
// it can scan the input text for comments and literals to
// copy forward.
fn print_crate(cm: codemap, intr: ident_interner,
fn print_crate(cm: codemap, intr: @ident_interner,
span_diagnostic: diagnostic::span_handler,
crate: @ast::crate, filename: ~str, in: io::Reader,
out: io::Writer, ann: pp_ann, is_expanded: bool) {
@ -91,40 +91,40 @@ fn print_crate_(s: ps, &&crate: @ast::crate) {
eof(s.s);
}
fn ty_to_str(ty: @ast::ty, intr: ident_interner) -> ~str {
fn ty_to_str(ty: @ast::ty, intr: @ident_interner) -> ~str {
to_str(ty, print_type, intr)
}
fn pat_to_str(pat: @ast::pat, intr: ident_interner) -> ~str {
fn pat_to_str(pat: @ast::pat, intr: @ident_interner) -> ~str {
to_str(pat, print_pat, intr)
}
fn expr_to_str(e: @ast::expr, intr: ident_interner) -> ~str {
fn expr_to_str(e: @ast::expr, intr: @ident_interner) -> ~str {
to_str(e, print_expr, intr)
}
fn tt_to_str(tt: ast::token_tree, intr: ident_interner) -> ~str {
fn tt_to_str(tt: ast::token_tree, intr: @ident_interner) -> ~str {
to_str(tt, print_tt, intr)
}
fn stmt_to_str(s: ast::stmt, intr: ident_interner) -> ~str {
fn stmt_to_str(s: ast::stmt, intr: @ident_interner) -> ~str {
to_str(s, print_stmt, intr)
}
fn item_to_str(i: @ast::item, intr: ident_interner) -> ~str {
fn item_to_str(i: @ast::item, intr: @ident_interner) -> ~str {
to_str(i, print_item, intr)
}
fn typarams_to_str(tps: ~[ast::ty_param], intr: ident_interner) -> ~str {
fn typarams_to_str(tps: ~[ast::ty_param], intr: @ident_interner) -> ~str {
to_str(tps, print_type_params, intr)
}
fn path_to_str(&&p: @ast::path, intr: ident_interner) -> ~str {
fn path_to_str(&&p: @ast::path, intr: @ident_interner) -> ~str {
to_str(p, |a,b| print_path(a, b, false), intr)
}
fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
params: ~[ast::ty_param], intr: ident_interner) -> ~str {
params: ~[ast::ty_param], intr: @ident_interner) -> ~str {
do io::with_str_writer |wr| {
let s = rust_printer(wr, intr);
print_fn(s, decl, None, name, params, None, ast::inherited);
@ -147,7 +147,7 @@ fn test_fun_to_str() {
assert fun_to_str(decl, "a", ~[]) == "fn a()";
}
fn block_to_str(blk: ast::blk, intr: ident_interner) -> ~str {
fn block_to_str(blk: ast::blk, intr: @ident_interner) -> ~str {
do io::with_str_writer |wr| {
let s = rust_printer(wr, intr);
// containing cbox, will be closed by print-block at }
@ -159,15 +159,15 @@ fn block_to_str(blk: ast::blk, intr: ident_interner) -> ~str {
}
}
fn meta_item_to_str(mi: @ast::meta_item, intr: ident_interner) -> ~str {
fn meta_item_to_str(mi: @ast::meta_item, intr: @ident_interner) -> ~str {
to_str(mi, print_meta_item, intr)
}
fn attribute_to_str(attr: ast::attribute, intr: ident_interner) -> ~str {
fn attribute_to_str(attr: ast::attribute, intr: @ident_interner) -> ~str {
to_str(attr, print_attribute, intr)
}
fn variant_to_str(var: ast::variant, intr: ident_interner) -> ~str {
fn variant_to_str(var: ast::variant, intr: @ident_interner) -> ~str {
to_str(var, print_variant, intr)
}
@ -443,6 +443,7 @@ fn print_item(s: ps, &&item: @ast::item) {
print_outer_attributes(s, item.attrs);
let ann_node = node_item(s, item);
s.ann.pre(ann_node);
print_visibility(s, item.vis);
match item.node {
ast::item_const(ty, expr) => {
head(s, visibility_qualified(item.vis, ~"const"));
@ -979,7 +980,7 @@ fn print_mac(s: ps, m: ast::mac) {
Some(@{node: ast::expr_vec(_, _), _}) => (),
_ => word(s.s, ~" ")
}
arg.iter(|a| print_expr(s, a));
arg.iter(|a| print_expr(s, *a));
// FIXME: extension 'body' (#2339)
}
ast::mac_invoc_tt(pth, tts) => {
@ -1111,7 +1112,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
ast::expr_call(func, args, has_block) => {
let mut base_args = args;
let blk = if has_block {
let blk_arg = vec::pop(base_args);
let blk_arg = base_args.pop();
match blk_arg.node {
ast::expr_loop_body(_) => { head(s, ~"for"); }
ast::expr_do_body(_) => { head(s, ~"do"); }
@ -1177,7 +1178,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
ast::expr_loop(blk, opt_ident) => {
head(s, ~"loop");
space(s.s);
opt_ident.iter(|ident| {print_ident(s, ident); space(s.s)});
opt_ident.iter(|ident| {print_ident(s, *ident); space(s.s)});
print_block(s, blk);
}
ast::expr_match(expr, arms) => {
@ -1360,12 +1361,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
ast::expr_break(opt_ident) => {
word(s.s, ~"break");
space(s.s);
opt_ident.iter(|ident| {print_ident(s, ident); space(s.s)});
opt_ident.iter(|ident| {print_ident(s, *ident); space(s.s)});
}
ast::expr_again(opt_ident) => {
word(s.s, ~"loop");
space(s.s);
opt_ident.iter(|ident| {print_ident(s, ident); space(s.s)});
opt_ident.iter(|ident| {print_ident(s, *ident); space(s.s)});
}
ast::expr_ret(result) => {
word(s.s, ~"return");
@ -2059,7 +2060,7 @@ fn print_string(s: ps, st: ~str) {
word(s.s, ~"\"");
}
fn to_str<T>(t: T, f: fn@(ps, T), intr: ident_interner) -> ~str {
fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str {
do io::with_str_writer |wr| {
let s = rust_printer(wr, intr);
f(s, t);

View file

@ -13,6 +13,8 @@
#[allow(vecs_implicitly_copyable)];
#[allow(non_camel_case_types)];
#[allow(deprecated_mode)];
#[allow(deprecated_pattern)];
extern mod core(vers = "0.4");
extern mod std(vers = "0.4");
@ -129,6 +131,8 @@ mod ext {
#[legacy_exports]
mod auto_serialize;
#[legacy_exports]
mod auto_serialize2;
#[legacy_exports]
mod source_util;
mod pipes {

View file

@ -59,4 +59,4 @@ impl <T:Eq IterBytes Hash Const Copy> hash_interner<T>: interner<T> {
pure fn get(idx: uint) -> T { self.vect.get_elt(idx) }
fn len() -> uint { return self.vect.len(); }
}
}

View file

@ -241,7 +241,7 @@ fn visit_pat<E>(p: @pat, e: E, v: vt<E>) {
v.visit_pat(inner, e, v),
pat_ident(_, path, inner) => {
visit_path(path, e, v);
do option::iter(&inner) |subpat| { v.visit_pat(subpat, e, v)};
do option::iter(&inner) |subpat| { v.visit_pat(*subpat, e, v)};
}
pat_lit(ex) => v.visit_expr(ex, e, v),
pat_range(e1, e2) => { v.visit_expr(e1, e, v); v.visit_expr(e2, e, v); }
@ -342,10 +342,10 @@ fn visit_struct_def<E>(sd: @struct_def, nm: ast::ident, tps: ~[ty_param],
visit_path(p.path, e, v);
}
do option::iter(&sd.ctor) |ctor| {
visit_class_ctor_helper(ctor, nm, tps, ast_util::local_def(id), e, v);
visit_class_ctor_helper(*ctor, nm, tps, ast_util::local_def(id), e, v);
};
do option::iter(&sd.dtor) |dtor| {
visit_class_dtor_helper(dtor, tps, ast_util::local_def(id), e, v)
visit_class_dtor_helper(*dtor, tps, ast_util::local_def(id), e, v)
};
}
@ -395,7 +395,7 @@ fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
match m.node {
ast::mac_invoc(_, arg, _) => {
option::map(&arg, |arg| v.visit_expr(arg, e, v)); }
option::map(&arg, |arg| v.visit_expr(*arg, e, v)); }
ast::mac_invoc_tt(*) => { /* no user-serviceable parts inside */ }
ast::mac_ellipsis => (),
ast::mac_aq(*) => { /* FIXME: maybe visit (Issue #2340) */ }