Switch alts to use arrows

This commit is contained in:
Brian Anderson 2012-08-03 19:59:04 -07:00
parent c9d2769379
commit 025d86624d
329 changed files with 8095 additions and 8423 deletions

View file

@ -129,22 +129,22 @@ fn is_uuid(id: ~str) -> bool {
}
alt i {
0u {
0u => {
if str::len(part) == 8u {
correct += 1u;
}
}
1u | 2u | 3u {
1u | 2u | 3u => {
if str::len(part) == 4u {
correct += 1u;
}
}
4u {
4u => {
if str::len(part) == 12u {
correct += 1u;
}
}
_ { }
_ => { }
}
}
if correct >= 5u {
@ -193,8 +193,8 @@ fn is_archive_url(u: ~str) -> bool {
// url parsing, we wouldn't need it
alt str::find_str(u, ~"://") {
option::some(i) { has_archive_extension(u) }
_ { false }
option::some(i) => has_archive_extension(u),
_ => false
}
}
@ -224,15 +224,15 @@ fn load_link(mis: ~[@ast::meta_item]) -> (option<~str>,
let mut uuid = none;
for mis.each |a| {
alt a.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
alt *v {
~"name" { name = some(*s); }
~"vers" { vers = some(*s); }
~"uuid" { uuid = some(*s); }
_ { }
~"name" => name = some(*s),
~"vers" => vers = some(*s),
~"uuid" => uuid = some(*s),
_ => { }
}
}
_ { fail ~"load_link: meta items must be name-values"; }
_ => fail ~"load_link: meta items must be name-values"
}
}
(name, vers, uuid)
@ -251,15 +251,15 @@ fn load_crate(filename: ~str) -> option<crate> {
for c.node.attrs.each |a| {
alt a.node.value.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
alt *v {
~"desc" { desc = some(*v); }
~"sigs" { sigs = some(*v); }
~"crate_type" { crate_type = some(*v); }
_ { }
~"desc" => desc = some(*v),
~"sigs" => sigs = some(*v),
~"crate_type" => crate_type = some(*v),
_ => { }
}
}
ast::meta_list(v, mis) {
ast::meta_list(v, mis) => {
if *v == ~"link" {
let (n, v, u) = load_link(mis);
name = n;
@ -267,7 +267,7 @@ fn load_crate(filename: ~str) -> option<crate> {
uuid = u;
}
}
_ {
_ => {
fail ~"crate attributes may not contain " +
~"meta_words";
}
@ -280,7 +280,7 @@ fn load_crate(filename: ~str) -> option<crate> {
fn goto_view_item(e: env, i: @ast::view_item) {
alt i.node {
ast::view_item_use(ident, metas, id) {
ast::view_item_use(ident, metas, id) => {
let name_items =
attr::find_meta_items_by_name(metas, ~"name");
let m = if name_items.is_empty() {
@ -294,16 +294,16 @@ fn load_crate(filename: ~str) -> option<crate> {
for m.each |item| {
alt attr::get_meta_item_value_str(item) {
some(value) {
some(value) => {
let name = attr::get_meta_item_name(item);
alt *name {
~"vers" { attr_vers = *value; }
~"from" { attr_from = *value; }
_ {}
~"vers" => attr_vers = *value,
~"from" => attr_from = *value,
_ => ()
}
}
none {}
none => ()
}
}
@ -316,11 +316,11 @@ fn load_crate(filename: ~str) -> option<crate> {
};
alt *attr_name {
~"std" | ~"core" { }
_ { vec::push(e.deps, query); }
~"std" | ~"core" => (),
_ => vec::push(e.deps, query)
}
}
_ { }
_ => ()
}
}
fn goto_item(_e: env, _i: @ast::item) {
@ -340,7 +340,7 @@ fn load_crate(filename: ~str) -> option<crate> {
let deps = copy e.deps;
alt (name, vers, uuid) {
(some(name0), some(vers0), some(uuid0)) {
(some(name0), some(vers0), some(uuid0)) => {
some({
name: name0,
vers: vers0,
@ -350,7 +350,7 @@ fn load_crate(filename: ~str) -> option<crate> {
crate_type: crate_type,
deps: deps })
}
_ { return none; }
_ => return none
}
}
@ -391,30 +391,22 @@ fn parse_source(name: ~str, j: json::json) -> source {
}
alt j {
json::dict(j) {
json::dict(j) => {
let mut url = alt j.find(~"url") {
some(json::string(u)) {
*u
}
_ { fail ~"needed 'url' field in source"; }
some(json::string(u)) => *u,
_ => fail ~"needed 'url' field in source"
};
let method = alt j.find(~"method") {
some(json::string(u)) {
*u
}
_ { assume_source_method(url) }
some(json::string(u)) => *u,
_ => assume_source_method(url)
};
let key = alt j.find(~"key") {
some(json::string(u)) {
some(*u)
}
_ { none }
some(json::string(u)) => some(*u),
_ => none
};
let keyfp = alt j.find(~"keyfp") {
some(json::string(u)) {
some(*u)
}
_ { none }
some(json::string(u)) => some(*u),
_ => none
};
if method == ~"file" {
url = os::make_absolute(url);
@ -427,7 +419,7 @@ fn parse_source(name: ~str, j: json::json) -> source {
mut keyfp: keyfp,
mut packages: ~[mut] };
}
_ { fail ~"needed dict value in source"; }
_ => fail ~"needed dict value in source"
};
}
@ -435,20 +427,20 @@ fn try_parse_sources(filename: ~str, sources: map::hashmap<~str, source>) {
if !os::path_exists(filename) { return; }
let c = io::read_whole_file_str(filename);
alt json::from_str(result::get(c)) {
ok(json::dict(j)) {
ok(json::dict(j)) => {
for j.each |k, v| {
sources.insert(k, parse_source(k, v));
debug!{"source: %s", k};
}
}
ok(_) { fail ~"malformed sources.json"; }
err(e) { fail fmt!{"%s:%s", filename, e.to_str()}; }
ok(_) => fail ~"malformed sources.json",
err(e) => fail fmt!{"%s:%s", filename, e.to_str()}
}
}
fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
let name = alt p.find(~"name") {
some(json::string(n)) {
some(json::string(n)) => {
if !valid_pkg_name(*n) {
warn(~"malformed source json: "
+ src.name + ~", '" + *n + ~"'"+
@ -458,14 +450,14 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
}
*n
}
_ {
_ => {
warn(~"malformed source json: " + src.name + ~" (missing name)");
return;
}
};
let uuid = alt p.find(~"uuid") {
some(json::string(n)) {
some(json::string(n)) => {
if !is_uuid(*n) {
warn(~"malformed source json: "
+ src.name + ~", '" + *n + ~"'"+
@ -474,23 +466,23 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
}
*n
}
_ {
_ => {
warn(~"malformed source json: " + src.name + ~" (missing uuid)");
return;
}
};
let url = alt p.find(~"url") {
some(json::string(n)) { *n }
_ {
some(json::string(n)) => *n,
_ => {
warn(~"malformed source json: " + src.name + ~" (missing url)");
return;
}
};
let method = alt p.find(~"method") {
some(json::string(n)) { *n }
_ {
some(json::string(n)) => *n,
_ => {
warn(~"malformed source json: "
+ src.name + ~" (missing method)");
return;
@ -498,26 +490,26 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
};
let reference = alt p.find(~"ref") {
some(json::string(n)) { some(*n) }
_ { none }
some(json::string(n)) => some(*n),
_ => none
};
let mut tags = ~[];
alt p.find(~"tags") {
some(json::list(js)) {
some(json::list(js)) => {
for (*js).each |j| {
alt j {
json::string(j) { vec::grow(tags, 1u, *j); }
_ { }
json::string(j) => vec::grow(tags, 1u, *j),
_ => ()
}
}
}
_ { }
_ => ()
}
let description = alt p.find(~"description") {
some(json::string(n)) { *n }
_ {
some(json::string(n)) => *n,
_ => {
warn(~"malformed source json: " + src.name
+ ~" (missing description)");
return;
@ -536,11 +528,11 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
};
alt vec::position(src.packages, |pkg| pkg.uuid == uuid) {
some(idx) {
some(idx) => {
src.packages[idx] = newpkg;
log(debug, ~" updated package: " + src.name + ~"/" + name);
}
none {
none => {
vec::grow(src.packages, 1u, newpkg);
}
}
@ -554,17 +546,17 @@ fn load_source_info(c: cargo, src: source) {
if !os::path_exists(srcfile) { return; }
let srcstr = io::read_whole_file_str(srcfile);
alt json::from_str(result::get(srcstr)) {
ok(json::dict(s)) {
ok(json::dict(s)) => {
let o = parse_source(src.name, json::dict(s));
src.key = o.key;
src.keyfp = o.keyfp;
}
ok(_) {
ok(_) => {
warn(~"malformed source.json: " + src.name +
~"(source info is not a dict)");
}
err(e) {
err(e) => {
warn(fmt!{"%s:%s", src.name, e.to_str()});
}
};
@ -576,24 +568,24 @@ fn load_source_packages(c: cargo, src: source) {
if !os::path_exists(pkgfile) { return; }
let pkgstr = io::read_whole_file_str(pkgfile);
alt json::from_str(result::get(pkgstr)) {
ok(json::list(js)) {
ok(json::list(js)) => {
for (*js).each |j| {
alt j {
json::dict(p) {
json::dict(p) => {
load_one_source_package(src, p);
}
_ {
_ => {
warn(~"malformed source json: " + src.name +
~" (non-dict pkg)");
}
}
}
}
ok(_) {
ok(_) => {
warn(~"malformed packages.json: " + src.name +
~"(packages is not a list)");
}
err(e) {
err(e) => {
warn(fmt!{"%s:%s", src.name, e.to_str()});
}
};
@ -601,8 +593,8 @@ fn load_source_packages(c: cargo, src: source) {
fn build_cargo_options(argv: ~[~str]) -> options {
let matches = alt getopts::getopts(argv, opts()) {
result::ok(m) { m }
result::err(f) {
result::ok(m) => m,
result::err(f) => {
fail fmt!{"%s", getopts::fail_str(f)};
}
};
@ -632,14 +624,14 @@ fn build_cargo_options(argv: ~[~str]) -> options {
fn configure(opts: options) -> cargo {
let home = alt get_cargo_root() {
ok(home) { home }
err(_err) { result::get(get_cargo_sysroot()) }
ok(home) => home,
err(_err) => result::get(get_cargo_sysroot())
};
let get_cargo_dir = alt opts.mode {
system_mode { get_cargo_sysroot }
user_mode { get_cargo_root }
local_mode { get_cargo_root_nearest }
system_mode => get_cargo_sysroot,
user_mode => get_cargo_root,
local_mode => get_cargo_root_nearest
};
let p = result::get(get_cargo_dir());
@ -726,8 +718,8 @@ fn run_in_buildpath(what: ~str, path: ~str, subdir: ~str, cf: ~str,
fn test_one_crate(_c: cargo, path: ~str, cf: ~str) {
let buildpath = alt run_in_buildpath(~"testing", path, ~"/test", cf,
~[ ~"--test"]) {
none { return; }
some(bp) { bp }
none => return,
some(bp) => bp
};
run_programs(buildpath);
}
@ -735,8 +727,8 @@ fn test_one_crate(_c: cargo, path: ~str, cf: ~str) {
fn install_one_crate(c: cargo, path: ~str, cf: ~str) {
let buildpath = alt run_in_buildpath(~"installing", path,
~"/build", cf, ~[]) {
none { return; }
some(bp) { bp }
none => return,
some(bp) => bp
};
let newv = os::list_dir_path(buildpath);
let exec_suffix = os::exe_suffix();
@ -761,13 +753,13 @@ fn install_one_crate(c: cargo, path: ~str, cf: ~str) {
fn rustc_sysroot() -> ~str {
alt os::self_exe_path() {
some(path) {
some(path) => {
let path = ~[path, ~"..", ~"bin", ~"rustc"];
let rustc = path::normalize(path::connect_many(path));
debug!{" rustc: %s", rustc};
rustc
}
none { ~"rustc" }
none => ~"rustc"
}
}
@ -788,8 +780,8 @@ fn install_source(c: cargo, path: ~str) {
for cratefiles.each |cf| {
alt load_crate(cf) {
none { again; }
some(crate) {
none => again,
some(crate) => {
for crate.deps.each |query| {
// FIXME (#1356): handle cyclic dependencies
// (n.b. #1356 says "Cyclic dependency is an error
@ -797,8 +789,8 @@ fn install_source(c: cargo, path: ~str) {
let wd_base = c.workdir + path::path_sep();
let wd = alt tempfile::mkdtemp(wd_base, ~"") {
some(wd) { wd }
none { fail fmt!{"needed temp dir: %s", wd_base}; }
some(wd) => wd,
none => fail fmt!{"needed temp dir: %s", wd_base}
};
install_query(c, wd, query);
@ -847,18 +839,18 @@ fn install_file(c: cargo, wd: ~str, path: ~str) {
fn install_package(c: cargo, src: ~str, wd: ~str, pkg: package) {
let url = copy pkg.url;
let method = alt pkg.method {
~"git" { ~"git" }
~"file" { ~"file" }
_ { ~"curl" }
~"git" => ~"git",
~"file" => ~"file",
_ => ~"curl"
};
info(fmt!{"installing %s/%s via %s...", src, pkg.name, method});
alt method {
~"git" { install_git(c, wd, url, copy pkg.reference); }
~"file" { install_file(c, wd, url); }
~"curl" { install_curl(c, wd, copy url); }
_ {}
~"git" => install_git(c, wd, url, copy pkg.reference),
~"file" => install_file(c, wd, url),
~"curl" => install_curl(c, wd, copy url),
_ => ()
}
}
@ -922,7 +914,7 @@ fn install_named(c: cargo, wd: ~str, name: ~str) {
fn install_uuid_specific(c: cargo, wd: ~str, src: ~str, uuid: ~str) {
alt c.sources.find(src) {
some(s) {
some(s) => {
let packages = copy s.packages;
if vec::any(packages, |p| {
if p.uuid == uuid {
@ -931,14 +923,14 @@ fn install_uuid_specific(c: cargo, wd: ~str, src: ~str, uuid: ~str) {
} else { false }
}) { return; }
}
_ { }
_ => ()
}
error(~"can't find package: " + src + ~"/" + uuid);
}
fn install_named_specific(c: cargo, wd: ~str, src: ~str, name: ~str) {
alt c.sources.find(src) {
some(s) {
some(s) => {
let packages = copy s.packages;
if vec::any(packages, |p| {
if p.name == name {
@ -947,7 +939,7 @@ fn install_named_specific(c: cargo, wd: ~str, src: ~str, name: ~str) {
} else { false }
}) { return; }
}
_ { }
_ => ()
}
error(~"can't find package: " + src + ~"/" + name);
}
@ -969,7 +961,7 @@ fn cmd_uninstall(c: cargo) {
if is_uuid(target) {
for os::list_dir(lib).each |file| {
alt str::find_str(file, ~"-" + target + ~"-") {
some(idx) {
some(idx) => {
let full = path::normalize(path::connect(lib, file));
if os::remove_file(full) {
info(~"uninstalled: '" + full + ~"'");
@ -978,7 +970,7 @@ fn cmd_uninstall(c: cargo) {
}
return;
}
none { again; }
none => again
}
}
@ -986,7 +978,7 @@ fn cmd_uninstall(c: cargo) {
} else {
for os::list_dir(lib).each |file| {
alt str::find_str(file, ~"lib" + target + ~"-") {
some(idx) {
some(idx) => {
let full = path::normalize(path::connect(lib,
file));
if os::remove_file(full) {
@ -996,12 +988,12 @@ fn cmd_uninstall(c: cargo) {
}
return;
}
none { again; }
none => again
}
}
for os::list_dir(bin).each |file| {
alt str::find_str(file, target) {
some(idx) {
some(idx) => {
let full = path::normalize(path::connect(bin, file));
if os::remove_file(full) {
info(~"uninstalled: '" + full + ~"'");
@ -1010,7 +1002,7 @@ fn cmd_uninstall(c: cargo) {
}
return;
}
none { again; }
none => again
}
}
@ -1020,12 +1012,12 @@ fn cmd_uninstall(c: cargo) {
fn install_query(c: cargo, wd: ~str, target: ~str) {
alt c.dep_cache.find(target) {
some(inst) {
some(inst) => {
if inst {
return;
}
}
none {}
none => ()
}
c.dep_cache.insert(target, true);
@ -1047,7 +1039,7 @@ fn install_query(c: cargo, wd: ~str, target: ~str) {
let mut ps = copy target;
alt str::find_char(ps, '/') {
option::some(idx) {
option::some(idx) => {
let source = str::slice(ps, 0u, idx);
ps = str::slice(ps, idx + 1u, str::len(ps));
if is_uuid(ps) {
@ -1056,7 +1048,7 @@ fn install_query(c: cargo, wd: ~str, target: ~str) {
install_named_specific(c, wd, source, ps);
}
}
option::none {
option::none => {
if is_uuid(ps) {
install_uuid(c, wd, ps);
} else {
@ -1081,8 +1073,8 @@ fn install_query(c: cargo, wd: ~str, target: ~str) {
fn cmd_install(c: cargo) unsafe {
let wd_base = c.workdir + path::path_sep();
let wd = alt tempfile::mkdtemp(wd_base, ~"") {
some(wd) { wd }
none { fail fmt!{"needed temp dir: %s", wd_base}; }
some(wd) => wd,
none => fail fmt!{"needed temp dir: %s", wd_base}
};
if vec::len(c.opts.free) == 2u {
@ -1138,7 +1130,7 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
os::copy_file(path::connect(url, ~"packages.json.sig"), sigfile);
alt copy src.key {
some(u) {
some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o", keyfile, u]);
if p.status != 0 {
@ -1147,10 +1139,10 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
}
pgp::add(c.root, keyfile);
}
_ { }
_ => ()
}
alt (src.key, src.keyfp) {
(some(_), some(f)) {
(some(_), some(f)) => {
let r = pgp::verify(c.root, pkgfile, sigfile, f);
if !r {
@ -1169,7 +1161,7 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
}
}
}
_ {}
_ => ()
}
copy_warn(pkgfile, destpkgfile);
@ -1247,7 +1239,7 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
let has_src_file = os::path_exists(srcfile);
alt copy src.key {
some(u) {
some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o", keyfile, u]);
if p.status != 0 {
@ -1257,10 +1249,10 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
}
pgp::add(c.root, keyfile);
}
_ { }
_ => ()
}
alt (src.key, src.keyfp) {
(some(_), some(f)) {
(some(_), some(f)) => {
let r = pgp::verify(c.root, pkgfile, sigfile, f);
if !r {
@ -1281,7 +1273,7 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
}
}
}
_ {}
_ => ()
}
os::remove_file(keyfile);
@ -1327,7 +1319,7 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
}
alt copy src.key {
some(u) {
some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o", keyfile, u]);
if p.status != 0 {
@ -1336,10 +1328,10 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
}
pgp::add(c.root, keyfile);
}
_ { }
_ => ()
}
alt (src.key, src.keyfp) {
(some(_), some(f)) {
(some(_), some(f)) => {
if smart {
url = src.url + ~"/packages.json.sig";
}
@ -1383,7 +1375,7 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
}
}
}
_ {}
_ => ()
}
copy_warn(pkgfile, destpkgfile);
@ -1412,9 +1404,9 @@ fn sync_one(c: cargo, src: source) {
need_dir(dir);
let result = alt src.method {
~"git" { sync_one_git(c, dir, src) }
~"file" { sync_one_file(c, dir, src) }
_ { sync_one_curl(c, dir, src) }
~"git" => sync_one_git(c, dir, src),
~"file" => sync_one_file(c, dir, src),
_ => sync_one_curl(c, dir, src)
};
if result {
@ -1499,10 +1491,10 @@ fn cmd_list(c: cargo) {
error(fmt!{"'%s' is an invalid source name", name});
} else {
alt c.sources.find(name) {
some(source) {
some(source) => {
print_source(source);
}
none {
none => {
error(fmt!{"no such source: %s", name});
}
}
@ -1571,7 +1563,7 @@ fn dump_sources(c: cargo) {
}
alt io::buffered_file_writer(out) {
result::ok(writer) {
result::ok(writer) => {
let hash = map::str_hash();
let root = json::dict(hash);
@ -1583,16 +1575,16 @@ fn dump_sources(c: cargo) {
chash.insert(~"method", json::string(@v.method));
alt copy v.key {
some(key) {
some(key) => {
chash.insert(~"key", json::string(@key));
}
_ {}
_ => ()
}
alt copy v.keyfp {
some(keyfp) {
some(keyfp) => {
chash.insert(~"keyfp", json::string(@keyfp));
}
_ {}
_ => ()
}
hash.insert(k, child);
@ -1600,7 +1592,7 @@ fn dump_sources(c: cargo) {
writer.write_str(json::to_str(root));
}
result::err(e) {
result::err(e) => {
error(fmt!{"could not dump sources: %s", e});
}
}
@ -1624,14 +1616,14 @@ fn cmd_sources(c: cargo) {
let action = c.opts.free[2u];
alt action {
~"clear" {
~"clear" => {
for c.sources.each_key |k| {
c.sources.remove(k);
}
info(~"cleared sources");
}
~"add" {
~"add" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
@ -1646,10 +1638,10 @@ fn cmd_sources(c: cargo) {
}
alt c.sources.find(name) {
some(source) {
some(source) => {
error(fmt!{"source already exists: %s", name});
}
none {
none => {
c.sources.insert(name, @{
name: name,
mut url: url,
@ -1662,7 +1654,7 @@ fn cmd_sources(c: cargo) {
}
}
}
~"remove" {
~"remove" => {
if vec::len(c.opts.free) < 4u {
cmd_usage();
return;
@ -1676,16 +1668,16 @@ fn cmd_sources(c: cargo) {
}
alt c.sources.find(name) {
some(source) {
some(source) => {
c.sources.remove(name);
info(fmt!{"removed source: %s", name});
}
none {
none => {
error(fmt!{"no such source: %s", name});
}
}
}
~"set-url" {
~"set-url" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
@ -1700,7 +1692,7 @@ fn cmd_sources(c: cargo) {
}
alt c.sources.find(name) {
some(source) {
some(source) => {
let old = copy source.url;
let method = assume_source_method(url);
@ -1711,12 +1703,12 @@ fn cmd_sources(c: cargo) {
info(fmt!{"changed source url: '%s' to '%s'", old, url});
}
none {
none => {
error(fmt!{"no such source: %s", name});
}
}
}
~"set-method" {
~"set-method" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
@ -1731,13 +1723,13 @@ fn cmd_sources(c: cargo) {
}
alt c.sources.find(name) {
some(source) {
some(source) => {
let old = copy source.method;
source.method = alt method {
~"git" { ~"git" }
~"file" { ~"file" }
_ { ~"curl" }
~"git" => ~"git",
~"file" => ~"file",
_ => ~"curl"
};
c.sources.insert(name, source);
@ -1745,12 +1737,12 @@ fn cmd_sources(c: cargo) {
info(fmt!{"changed source method: '%s' to '%s'", old,
method});
}
none {
none => {
error(fmt!{"no such source: %s", name});
}
}
}
~"rename" {
~"rename" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
@ -1769,17 +1761,17 @@ fn cmd_sources(c: cargo) {
}
alt c.sources.find(name) {
some(source) {
some(source) => {
c.sources.remove(name);
c.sources.insert(newn, source);
info(fmt!{"renamed source: %s to %s", name, newn});
}
none {
none => {
error(fmt!{"no such source: %s", name});
}
}
}
_ { cmd_usage(); }
_ => cmd_usage()
}
}
@ -1883,13 +1875,13 @@ fn main(argv: ~[~str]) {
}
if o.help {
alt o.free[1] {
~"init" { cmd_usage_init(); }
~"install" { cmd_usage_install(); }
~"uninstall" { cmd_usage_uninstall(); }
~"list" { cmd_usage_list(); }
~"search" { cmd_usage_search(); }
~"sources" { cmd_usage_sources(); }
_ { cmd_usage(); }
~"init" => cmd_usage_init(),
~"install" => cmd_usage_install(),
~"uninstall" => cmd_usage_uninstall(),
~"list" => cmd_usage_list(),
~"search" => cmd_usage_search(),
~"sources" => cmd_usage_sources(),
_ => cmd_usage()
}
return;
}
@ -1910,13 +1902,13 @@ fn main(argv: ~[~str]) {
}
alt o.free[1] {
~"init" { cmd_init(c); }
~"install" { cmd_install(c); }
~"uninstall" { cmd_uninstall(c); }
~"list" { cmd_list(c); }
~"search" { cmd_search(c); }
~"sources" { cmd_sources(c); }
_ { cmd_usage(); }
~"init" => cmd_init(c),
~"install" => cmd_install(c),
~"uninstall" => cmd_uninstall(c),
~"list" => cmd_list(c),
~"search" => cmd_search(c),
~"sources" => cmd_sources(c),
_ => cmd_usage()
}
dump_cache(c);

View file

@ -43,8 +43,8 @@ fn parse_config(args: ~[~str]) -> config {
let args_ = vec::tail(args);
let matches =
alt getopts::getopts(args_, opts) {
ok(m) { m }
err(f) { fail getopts::fail_str(f) }
ok(m) => m,
err(f) => fail getopts::fail_str(f)
};
return {compile_lib_path: getopts::opt_str(matches, ~"compile-lib-path"),
@ -85,7 +85,7 @@ fn log_config(config: config) {
}
fn opt_str(maybestr: option<~str>) -> ~str {
alt maybestr { option::some(s) { s } option::none { ~"(none)" } }
alt maybestr { option::some(s) => s, option::none => ~"(none)" }
}
fn str_opt(maybestr: ~str) -> option<~str> {
@ -94,20 +94,20 @@ fn str_opt(maybestr: ~str) -> option<~str> {
fn str_mode(s: ~str) -> mode {
alt s {
~"compile-fail" { mode_compile_fail }
~"run-fail" { mode_run_fail }
~"run-pass" { mode_run_pass }
~"pretty" { mode_pretty }
_ { fail ~"invalid mode" }
~"compile-fail" => mode_compile_fail,
~"run-fail" => mode_run_fail,
~"run-pass" => mode_run_pass,
~"pretty" => mode_pretty,
_ => fail ~"invalid mode"
}
}
fn mode_str(mode: mode) -> ~str {
alt mode {
mode_compile_fail { ~"compile-fail" }
mode_run_fail { ~"run-fail" }
mode_run_pass { ~"run-pass" }
mode_pretty { ~"pretty" }
mode_compile_fail => ~"compile-fail",
mode_run_fail => ~"run-fail",
mode_run_pass => ~"run-pass",
mode_pretty => ~"pretty"
}
}
@ -121,14 +121,14 @@ fn run_tests(config: config) {
fn test_opts(config: config) -> test::test_opts {
{filter:
alt config.filter {
option::some(s) { option::some(s) }
option::none { option::none }
option::some(s) => option::some(s),
option::none => option::none
},
run_ignored: config.run_ignored,
logfile:
alt config.logfile {
option::some(s) { option::some(s) }
option::none { option::none }
option::some(s) => option::some(s),
option::none => option::none
}
}
}
@ -149,7 +149,10 @@ fn make_tests(config: config) -> ~[test::test_desc] {
fn is_test(config: config, testfile: ~str) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
alt config.mode { mode_pretty { ~[~".rs"] } _ { ~[~".rc", ~".rs"] } };
alt config.mode {
mode_pretty => ~[~".rs"],
_ => ~[~".rc", ~".rs"]
};
let invalid_prefixes = ~[~".", ~"#", ~"~"];
let name = path::basename(testfile);

View file

@ -24,8 +24,8 @@ fn parse_expected(line_num: uint, line: ~str) -> ~[expected_error] unsafe {
let error_tag = ~"//~";
let mut idx;
alt str::find_str(line, error_tag) {
option::none { return ~[]; }
option::some(nn) { idx = (nn as uint) + str::len(error_tag); }
option::none => return ~[],
option::some(nn) => { idx = (nn as uint) + str::len(error_tag); }
}
// "//~^^^ kind msg" denotes a message expected

View file

@ -31,8 +31,8 @@ fn load_props(testfile: ~str) -> test_props {
let mut pp_exact = option::none;
for iter_header(testfile) |ln| {
alt parse_error_pattern(ln) {
option::some(ep) { vec::push(error_patterns, ep) }
option::none { }
option::some(ep) => vec::push(error_patterns, ep),
option::none => ()
};
if option::is_none(compile_flags) {
@ -108,17 +108,17 @@ fn parse_exec_env(line: ~str) -> option<(~str, ~str)> {
// nv is either FOO or FOO=BAR
let strs = str::splitn_char(nv, '=', 1u);
alt strs.len() {
1u { (strs[0], ~"") }
2u { (strs[0], strs[1]) }
n { fail fmt!{"Expected 1 or 2 strings, not %u", n}; }
1u => (strs[0], ~""),
2u => (strs[0], strs[1]),
n => fail fmt!{"Expected 1 or 2 strings, not %u", n}
}
}
}
fn parse_pp_exact(line: ~str, testfile: ~str) -> option<~str> {
alt parse_name_value_directive(line, ~"pp-exact") {
option::some(s) { option::some(s) }
option::none {
option::some(s) => option::some(s),
option::none => {
if parse_name_directive(line, ~"pp-exact") {
option::some(path::basename(testfile))
} else {
@ -136,12 +136,12 @@ fn parse_name_value_directive(line: ~str,
directive: ~str) -> option<~str> unsafe {
let keycolon = directive + ~":";
alt str::find_str(line, keycolon) {
option::some(colon) {
option::some(colon) => {
let value = str::slice(line, colon + str::len(keycolon),
str::len(line));
debug!{"%s: %s", directive, value};
option::some(value)
}
option::none { option::none }
option::none => option::none
}
}

View file

@ -75,10 +75,10 @@ fn run(lib_path: ~str,
while count > 0 {
let stream = comm::recv(p);
alt check stream {
(1, s) {
(1, s) => {
outs = s;
}
(2, s) {
(2, s) => {
errs = s;
}
};

View file

@ -19,10 +19,10 @@ fn run(config: config, testfile: ~str) {
debug!{"running %s", testfile};
let props = load_props(testfile);
alt config.mode {
mode_compile_fail { run_cfail_test(config, props, testfile); }
mode_run_fail { run_rfail_test(config, props, testfile); }
mode_run_pass { run_rpass_test(config, props, testfile); }
mode_pretty { run_pretty_test(config, props, testfile); }
mode_compile_fail => run_cfail_test(config, props, testfile),
mode_run_fail => run_rfail_test(config, props, testfile),
mode_run_pass => run_rpass_test(config, props, testfile),
mode_pretty => run_pretty_test(config, props, testfile)
}
}
@ -90,7 +90,7 @@ fn run_pretty_test(config: config, props: test_props, testfile: ~str) {
} else { logv(config, ~"testing for converging pretty-printing"); }
let rounds =
alt props.pp_exact { option::some(_) { 1 } option::none { 2 } };
alt props.pp_exact { option::some(_) => 1, option::none => 2 };
let mut srcs = ~[result::get(io::read_whole_file_str(testfile))];
@ -110,11 +110,11 @@ fn run_pretty_test(config: config, props: test_props, testfile: ~str) {
let mut expected =
alt props.pp_exact {
option::some(file) {
option::some(file) => {
let filepath = path::connect(path::dirname(testfile), file);
result::get(io::read_whole_file_str(filepath))
}
option::none { srcs[vec::len(srcs) - 2u] }
option::none => { srcs[vec::len(srcs) - 2u] }
};
let mut actual = srcs[vec::len(srcs) - 1u];
@ -384,8 +384,8 @@ fn make_run_args(config: config, _props: test_props, testfile: ~str) ->
// then split apart its command
let runtool =
alt config.runtool {
option::some(s) { option::some(s) }
option::none { option::none }
option::some(s) => option::some(s),
option::none => option::none
};
split_maybe_args(runtool)
};
@ -403,8 +403,8 @@ fn split_maybe_args(argstr: option<~str>) -> ~[~str] {
}
alt argstr {
option::some(s) { rm_whitespace(str::split_char(s, ' ')) }
option::none { ~[] }
option::some(s) => rm_whitespace(str::split_char(s, ' ')),
option::none => ~[]
}
}

View file

@ -8,10 +8,10 @@ fn make_new_path(path: ~str) -> ~str {
// Windows just uses PATH as the library search path, so we have to
// maintain the current value while adding our own
alt getenv(lib_path_env_var()) {
option::some(curr) {
option::some(curr) => {
fmt!{"%s%s%s", path, path_div(), curr}
}
option::none { path }
option::none => path
}
}

View file

@ -63,39 +63,39 @@ pure fn safe_to_steal_expr(e: @ast::expr, tm: test_mode) -> bool {
pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool {
alt tm {
tm_converge {
tm_converge => {
alt e.node {
// If the fuzzer moves a block-ending-in-semicolon into callee
// position, the pretty-printer can't preserve this even by
// parenthesizing!! See email to marijn.
ast::expr_if(_, _, _) { false }
ast::expr_block(_) { false }
ast::expr_alt(_, _, _) { false }
ast::expr_while(_, _) { false }
ast::expr_if(_, _, _) => { false }
ast::expr_block(_) => { false }
ast::expr_alt(_, _, _) => { false }
ast::expr_while(_, _) => { false }
// https://github.com/mozilla/rust/issues/929
ast::expr_cast(_, _) { false }
ast::expr_assert(_) { false }
ast::expr_binary(_, _, _) { false }
ast::expr_assign(_, _) { false }
ast::expr_assign_op(_, _, _) { false }
ast::expr_cast(_, _) => { false }
ast::expr_assert(_) => { false }
ast::expr_binary(_, _, _) => { false }
ast::expr_assign(_, _) => { false }
ast::expr_assign_op(_, _, _) => { false }
ast::expr_fail(option::none) { false }
ast::expr_ret(option::none) { false }
ast::expr_fail(option::none) => { false }
ast::expr_ret(option::none) => { false }
// https://github.com/mozilla/rust/issues/953
ast::expr_fail(option::some(_)) { false }
ast::expr_fail(option::some(_)) => { false }
// https://github.com/mozilla/rust/issues/928
//ast::expr_cast(_, _) { false }
// https://github.com/mozilla/rust/issues/1458
ast::expr_call(_, _, _) { false }
ast::expr_call(_, _, _) => { false }
_ { true }
_ => { true }
}
}
tm_run { true }
tm_run => { true }
}
}
@ -141,23 +141,23 @@ fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff {
fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool {
alt e {
// https://github.com/mozilla/rust/issues/652
ast::expr_if(*) { false }
ast::expr_block(_) { false }
ast::expr_if(*) => { false }
ast::expr_block(_) => { false }
// expr_call is also missing a constraint
ast::expr_fn_block(*) { false }
ast::expr_fn_block(*) => { false }
_ { true }
_ => { true }
}
}
fn safe_to_replace_ty(t: ast::ty_, _tm: test_mode) -> bool {
alt t {
ast::ty_infer { false } // always implicit, always top level
ast::ty_bot { false } // in source, can only appear
ast::ty_infer => { false } // always implicit, always top level
ast::ty_bot => { false } // in source, can only appear
// as the out type of a function
ast::ty_mac(_) { false }
_ { true }
ast::ty_mac(_) => { false }
_ => { true }
}
}
@ -273,10 +273,10 @@ fn check_variants_T<T: copy>(
pprust::no_ann(),
false));
alt cx.mode {
tm_converge {
tm_converge => {
check_roundtrip_convergence(str3, 1u);
}
tm_run {
tm_run => {
let file_label = fmt!{"rusttmp/%s_%s_%u_%u",
last_part(filename),
thing_label, i, j};
@ -315,17 +315,17 @@ fn check_whole_compiler(code: ~str, suggested_filename_prefix: ~str,
let compile_result = check_compiling(filename);
let run_result = alt (compile_result, allow_running) {
(passed, true) { check_running(suggested_filename_prefix) }
(h, _) { h }
(passed, true) => { check_running(suggested_filename_prefix) }
(h, _) => { h }
};
alt run_result {
passed | cleanly_rejected(_) | known_bug(_) {
passed | cleanly_rejected(_) | known_bug(_) => {
removeIfExists(suggested_filename_prefix);
removeIfExists(suggested_filename_prefix + ~".rs");
removeDirIfExists(suggested_filename_prefix + ~".dSYM");
}
failed(s) {
failed(s) => {
log(error, ~"check_whole_compiler failure: " + s);
log(error, ~"Saved as: " + filename);
}
@ -365,17 +365,17 @@ fn check_running(exe_filename: ~str) -> happiness {
failed(~"Mentioned malloc")
} else {
alt p.status {
0 { passed }
100 { cleanly_rejected(~"running: explicit fail") }
101 | 247 { cleanly_rejected(~"running: timed out") }
245 | 246 | 138 | 252 {
0 => { passed }
100 => { cleanly_rejected(~"running: explicit fail") }
101 | 247 => { cleanly_rejected(~"running: timed out") }
245 | 246 | 138 | 252 => {
known_bug(~"https://github.com/mozilla/rust/issues/1466")
}
136 | 248 {
136 | 248 => {
known_bug(
~"SIGFPE - https://github.com/mozilla/rust/issues/944")
}
rc {
rc => {
failed(~"Rust program ran but exited with status " +
int::str(rc))
}
@ -442,8 +442,8 @@ fn has_raw_pointers(c: ast::crate) -> bool {
let has_rp = @mut false;
fn visit_ty(flag: @mut bool, t: @ast::ty) {
alt t.node {
ast::ty_ptr(_) { *flag = true; }
_ { }
ast::ty_ptr(_) => { *flag = true; }
_ => { }
}
}
let v =

View file

@ -40,9 +40,9 @@ pure fn is_false(v: bool) -> bool { !v }
/// Parse logic value from `s`
pure fn from_str(s: ~str) -> option<bool> {
alt check s {
~"true" { some(true) }
~"false" { some(false) }
_ { none }
~"true" => some(true),
~"false" => some(false),
_ => none
}
}

View file

@ -114,10 +114,10 @@ pure fn is_digit(c: char) -> bool {
*/
pure fn to_digit(c: char, radix: uint) -> option<uint> {
let val = alt c {
'0' to '9' { c as uint - ('0' as uint) }
'a' to 'z' { c as uint + 10u - ('a' as uint) }
'A' to 'Z' { c as uint + 10u - ('A' as uint) }
_ { return none; }
'0' to '9' => c as uint - ('0' as uint),
'a' to 'z' => c as uint + 10u - ('a' as uint),
'A' to 'Z' => c as uint + 10u - ('A' as uint),
_ => return none
};
if val < radix { some(val) }
else { none }
@ -159,14 +159,14 @@ fn escape_unicode(c: char) -> ~str {
*/
fn escape_default(c: char) -> ~str {
alt c {
'\t' { ~"\\t" }
'\r' { ~"\\r" }
'\n' { ~"\\n" }
'\\' { ~"\\\\" }
'\'' { ~"\\'" }
'"' { ~"\\\"" }
'\x20' to '\x7e' { str::from_char(c) }
_ { escape_unicode(c) }
'\t' => ~"\\t",
'\r' => ~"\\r",
'\n' => ~"\\n",
'\\' => ~"\\\\",
'\'' => ~"\\'",
'"' => ~"\\\"",
'\x20' to '\x7e' => str::from_char(c),
_ => escape_unicode(c)
}
}

View file

@ -410,8 +410,8 @@ fn test_select2_stress() {
let mut bs = 0;
for iter::repeat(msgs * times * 2u) {
alt check select2(po_a, po_b) {
either::left(~"a") { as += 1 }
either::right(~"b") { bs += 1 }
either::left(~"a") => as += 1,
either::right(~"b") => bs += 1
}
}

View file

@ -27,30 +27,22 @@ enum dlist<T> = @{
impl private_methods<T> for dlist_node<T> {
pure fn assert_links() {
alt self.next {
some(neighbour) {
alt neighbour.prev {
some(me) {
if !box::ptr_eq(*self, *me) {
fail ~"Asymmetric next-link in dlist node."
}
}
none { fail ~"One-way next-link in dlist node." }
}
some(neighbour) => alt neighbour.prev {
some(me) => if !box::ptr_eq(*self, *me) {
fail ~"Asymmetric next-link in dlist node."
}
none => fail ~"One-way next-link in dlist node."
}
none { }
none => ()
}
alt self.prev {
some(neighbour) {
alt neighbour.next {
some(me) {
if !box::ptr_eq(*me, *self) {
fail ~"Asymmetric prev-link in dlist node."
}
}
none { fail ~"One-way prev-link in dlist node." }
}
some(neighbour) => alt neighbour.next {
some(me) => if !box::ptr_eq(*me, *self) {
fail ~"Asymmetric prev-link in dlist node."
}
none => fail ~"One-way prev-link in dlist node."
}
none { }
none => ()
}
}
}
@ -64,8 +56,8 @@ impl extensions<T> for dlist_node<T> {
/// Get the next node in the list, failing if there isn't one.
pure fn next_node() -> dlist_node<T> {
alt self.next_link() {
some(nobe) { nobe }
none { fail ~"This dlist node has no next neighbour." }
some(nobe) => nobe,
none => fail ~"This dlist node has no next neighbour."
}
}
/// Get the previous node in the list, if there is one.
@ -76,8 +68,8 @@ impl extensions<T> for dlist_node<T> {
/// Get the previous node in the list, failing if there isn't one.
pure fn prev_node() -> dlist_node<T> {
alt self.prev_link() {
some(nobe) { nobe }
none { fail ~"This dlist node has no previous neighbour." }
some(nobe) => nobe,
none => fail ~"This dlist node has no previous neighbour."
}
}
}
@ -147,12 +139,12 @@ impl private_methods<T> for dlist<T> {
#[inline(always)]
fn link(+before: dlist_link<T>, +after: dlist_link<T>) {
alt before {
some(neighbour) { neighbour.next = after; }
none { self.hd = after; }
some(neighbour) => neighbour.next = after,
none => self.hd = after
}
alt after {
some(neighbour) { neighbour.prev = before; }
none { self.tl = before; }
some(neighbour) => neighbour.prev = before,
none => self.tl = before
}
}
// Remove a node from the list.
@ -295,19 +287,15 @@ impl extensions<T> for dlist<T> {
/// Get the node at the list's head, failing if empty. O(1).
pure fn head_n() -> dlist_node<T> {
alt self.hd {
some(nobe) { nobe }
none {
fail ~"Attempted to get the head of an empty dlist."
}
some(nobe) => nobe,
none => fail ~"Attempted to get the head of an empty dlist."
}
}
/// Get the node at the list's tail, failing if empty. O(1).
pure fn tail_n() -> dlist_node<T> {
alt self.tl {
some(nobe) { nobe }
none {
fail ~"Attempted to get the tail of an empty dlist."
}
some(nobe) => nobe,
none => fail ~"Attempted to get the tail of an empty dlist."
}
}

View file

@ -18,7 +18,10 @@ fn either<T, U, V>(f_left: fn(T) -> V,
* result is returned.
*/
alt value { left(l) { f_left(l) } right(r) { f_right(r) } }
alt value {
left(l) => f_left(l),
right(r) => f_right(r)
}
}
fn lefts<T: copy, U>(eithers: ~[either<T, U>]) -> ~[T] {
@ -26,7 +29,10 @@ fn lefts<T: copy, U>(eithers: ~[either<T, U>]) -> ~[T] {
let mut result: ~[T] = ~[];
for vec::each(eithers) |elt| {
alt elt { left(l) { vec::push(result, l); } _ {/* fallthrough */ } }
alt elt {
left(l) => vec::push(result, l),
_ => { /* fallthrough */ }
}
}
return result;
}
@ -36,7 +42,10 @@ fn rights<T, U: copy>(eithers: ~[either<T, U>]) -> ~[U] {
let mut result: ~[U] = ~[];
for vec::each(eithers) |elt| {
alt elt { right(r) { vec::push(result, r); } _ {/* fallthrough */ } }
alt elt {
right(r) => vec::push(result, r),
_ => { /* fallthrough */ }
}
}
return result;
}
@ -54,8 +63,8 @@ fn partition<T: copy, U: copy>(eithers: ~[either<T, U>])
let mut rights: ~[U] = ~[];
for vec::each(eithers) |elt| {
alt elt {
left(l) { vec::push(lefts, l); }
right(r) { vec::push(rights, r); }
left(l) => vec::push(lefts, l),
right(r) => vec::push(rights, r)
}
}
return {lefts: lefts, rights: rights};
@ -65,8 +74,8 @@ pure fn flip<T: copy, U: copy>(eith: either<T, U>) -> either<U, T> {
//! Flips between left and right of a given either
alt eith {
right(r) { left(r) }
left(l) { right(l) }
right(r) => left(r),
left(l) => right(l)
}
}
@ -80,21 +89,21 @@ pure fn to_result<T: copy, U: copy>(
*/
alt eith {
right(r) { result::ok(r) }
left(l) { result::err(l) }
right(r) => result::ok(r),
left(l) => result::err(l)
}
}
pure fn is_left<T, U>(eith: either<T, U>) -> bool {
//! Checks whether the given value is a left
alt eith { left(_) { true } _ { false } }
alt eith { left(_) => true, _ => false }
}
pure fn is_right<T, U>(eith: either<T, U>) -> bool {
//! Checks whether the given value is a right
alt eith { right(_) { true } _ { false } }
alt eith { right(_) => true, _ => false }
}
#[test]

View file

@ -123,8 +123,8 @@ mod ct {
if !('0' as u8 <= c && c <= '9' as u8) { return option::none; }
let n = (c - ('0' as u8)) as uint;
return alt peek_num(s, i + 1u, lim) {
none { some({num: n, next: i + 1u}) }
some(next) {
none => some({num: n, next: i + 1u}),
some(next) => {
let m = next.num;
let j = next.next;
some({num: n * 10u + m, next: j})
@ -151,8 +151,8 @@ mod ct {
if i >= lim { return {param: none, next: i}; }
let num = peek_num(s, i, lim);
return alt num {
none { {param: none, next: i} }
some(t) {
none => {param: none, next: i},
some(t) => {
let n = t.num;
let j = t.next;
if j < lim && s[j] == '$' as u8 {
@ -196,15 +196,16 @@ mod ct {
let param = parse_parameter(s, i + 1u, lim);
let j = param.next;
alt param.param {
none { {count: count_is_next_param, next: j} }
some(n) { {count: count_is_param(n), next: j} }
none => {count: count_is_next_param, next: j},
some(n) => {count: count_is_param(n), next: j}
}
} else {
let num = peek_num(s, i, lim);
alt num {
none { {count: count_implied, next: i} }
some(num) {
{count: count_is(num.num as int), next: num.next}
none => {count: count_implied, next: i},
some(num) => {
count: count_is(num.num as int),
next: num.next
}
}
};
@ -220,8 +221,8 @@ mod ct {
// If there were no digits specified, i.e. the precision
// was ".", then the precision is 0
alt count.count {
count_implied { {count: count_is(0), next: count.next} }
_ { count }
count_implied => {count: count_is(0), next: count.next},
_ => count
}
} else { {count: count_implied, next: i} };
}
@ -294,11 +295,11 @@ mod rt {
let prec = get_int_precision(cv);
let mut rs =
alt cv.ty {
ty_default { uint_to_str_prec(u, 10u, prec) }
ty_hex_lower { uint_to_str_prec(u, 16u, prec) }
ty_hex_upper { str::to_upper(uint_to_str_prec(u, 16u, prec)) }
ty_bits { uint_to_str_prec(u, 2u, prec) }
ty_octal { uint_to_str_prec(u, 8u, prec) }
ty_default => uint_to_str_prec(u, 10u, prec),
ty_hex_lower => uint_to_str_prec(u, 16u, prec),
ty_hex_upper => str::to_upper(uint_to_str_prec(u, 16u, prec)),
ty_bits => uint_to_str_prec(u, 2u, prec),
ty_octal => uint_to_str_prec(u, 8u, prec)
};
return unchecked { pad(cv, rs, pad_unsigned) };
}
@ -316,19 +317,19 @@ mod rt {
// For strings, precision is the maximum characters
// displayed
let mut unpadded = alt cv.precision {
count_implied { s.to_unique() }
count_is(max) {
if max as uint < str::char_len(s) {
str::substr(s, 0u, max as uint)
} else { s.to_unique() }
count_implied => s.to_unique(),
count_is(max) => if max as uint < str::char_len(s) {
str::substr(s, 0u, max as uint)
} else {
s.to_unique()
}
};
return unchecked { pad(cv, unpadded, pad_nozero) };
}
pure fn conv_float(cv: conv, f: float) -> ~str {
let (to_str, digits) = alt cv.precision {
count_is(c) { (float::to_str_exact, c as uint) }
count_implied { (float::to_str, 6u) }
count_is(c) => (float::to_str_exact, c as uint),
count_implied => (float::to_str, 6u)
};
let mut s = unchecked { to_str(f, digits) };
if 0.0 <= f {
@ -371,15 +372,15 @@ mod rt {
}
pure fn get_int_precision(cv: conv) -> uint {
return alt cv.precision {
count_is(c) { c as uint }
count_implied { 1u }
count_is(c) => c as uint,
count_implied => 1u
};
}
enum pad_mode { pad_signed, pad_unsigned, pad_nozero, pad_float }
fn pad(cv: conv, &s: ~str, mode: pad_mode) -> ~str {
let uwidth : uint = alt cv.width {
count_implied { return s; }
count_is(width) {
count_implied => return s,
count_is(width) => {
// FIXME: width should probably be uint (see Issue #1996)
width as uint
}
@ -393,13 +394,13 @@ mod rt {
return s + padstr;
}
let {might_zero_pad, signed} = alt mode {
pad_nozero { {might_zero_pad:false, signed:false} }
pad_signed { {might_zero_pad:true, signed:true } }
pad_float { {might_zero_pad:true, signed:true } }
pad_unsigned { {might_zero_pad:true, signed:false} }
pad_nozero => {might_zero_pad:false, signed:false},
pad_signed => {might_zero_pad:true, signed:true },
pad_float => {might_zero_pad:true, signed:true},
pad_unsigned => {might_zero_pad:true, signed:false}
};
pure fn have_precision(cv: conv) -> bool {
return alt cv.precision { count_implied { false } _ { true } };
return alt cv.precision { count_implied => false, _ => true };
}
let zero_padding = {
if might_zero_pad && have_flag(cv.flags, flag_left_zero_pad) &&

View file

@ -257,21 +257,21 @@ fn from_str(num: ~str) -> option<float> {
//The string must start with one of the following characters.
alt str::char_at(num, 0u) {
'-' | '+' | '0' to '9' | '.' {}
_ { return none; }
'-' | '+' | '0' to '9' | '.' => (),
_ => return none
}
//Determine if first char is '-'/'+'. Set [pos] and [neg] accordingly.
let mut neg = false; //Sign of the result
alt str::char_at(num, 0u) {
'-' {
'-' => {
neg = true;
pos = 1u;
}
'+' {
'+' => {
pos = 1u;
}
_ {}
_ => ()
}
//Examine the following chars until '.', 'e', 'E'
@ -280,16 +280,12 @@ fn from_str(num: ~str) -> option<float> {
c = char_range.ch;
pos = char_range.next;
alt c {
'0' to '9' {
'0' to '9' => {
total = total * 10f;
total += ((c as int) - ('0' as int)) as float;
}
'.' | 'e' | 'E' {
break;
}
_ {
return none;
}
'.' | 'e' | 'E' => break,
_ => return none
}
}
@ -300,16 +296,12 @@ fn from_str(num: ~str) -> option<float> {
c = char_range.ch;
pos = char_range.next;
alt c {
'0' | '1' | '2' | '3' | '4' | '5' | '6'| '7' | '8' | '9' {
'0' | '1' | '2' | '3' | '4' | '5' | '6'| '7' | '8' | '9' => {
decimal /= 10f;
total += (((c as int) - ('0' as int)) as float)*decimal;
}
'e' | 'E' {
break;
}
_ {
return none;
}
'e' | 'E' => break,
_ => return none
}
}
}
@ -321,26 +313,24 @@ fn from_str(num: ~str) -> option<float> {
let char_range = str::char_range_at(num, pos);
c = char_range.ch;
alt c {
'+' {
'+' => {
pos = char_range.next;
}
'-' {
'-' => {
pos = char_range.next;
neg_exponent = true;
}
_ {}
_ => ()
}
while(pos < len) {
let char_range = str::char_range_at(num, pos);
c = char_range.ch;
alt c {
'0' | '1' | '2' | '3' | '4' | '5' | '6'| '7' | '8' | '9' {
'0' | '1' | '2' | '3' | '4' | '5' | '6'| '7' | '8' | '9' => {
exponent *= 10u;
exponent += ((c as uint) - ('0' as uint));
}
_ {
break;
}
_ => break
}
pos = char_range.next;
}
@ -458,8 +448,8 @@ fn test_from_str() {
assert from_str(~"-inf") == some(neg_infinity);
// note: NaN != NaN, hence this slightly complex test
alt from_str(~"NaN") {
some(f) { assert is_NaN(f); }
none { fail; }
some(f) => assert is_NaN(f),
none => fail
}
assert from_str(~"") == none;

View file

@ -78,7 +78,7 @@ fn from_port<A:send>(-port: future_pipe::client::waiting<A>) -> future<A> {
port_ <-> *port;
let port = option::unwrap(port_);
alt recv(port) {
future_pipe::completed(data) { move_it!{data} }
future_pipe::completed(data) => move_it!{data}
}
}
}
@ -120,8 +120,8 @@ fn with<A,B>(future: future<A>, blk: fn(A) -> B) -> B {
//! Work with the value without copying it
let v = alt copy future.v {
either::left(v) { v }
either::right(f) {
either::left(v) => v,
either::right(f) => {
let v = @f();
future.v = either::left(v);
v

View file

@ -145,8 +145,8 @@ fn parse_buf(buf: ~[u8], radix: uint) -> option<T> {
let mut n = 0 as T;
loop {
alt char::to_digit(buf[i] as char, radix) {
some(d) { n += (d as T) * power; }
none { return none; }
some(d) => n += (d as T) * power,
none => return none
}
power *= radix as T;
if i <= start { return some(n); }

View file

@ -197,9 +197,9 @@ impl reader_util for reader {
fn convert_whence(whence: seek_style) -> i32 {
return alt whence {
seek_set { 0i32 }
seek_cur { 1i32 }
seek_end { 2i32 }
seek_set => 0i32,
seek_cur => 1i32,
seek_end => 2i32
};
}
@ -441,10 +441,10 @@ fn mk_file_writer(path: ~str, flags: ~[fileflag])
let mut fflags: c_int = wb();
for vec::each(flags) |f| {
alt f {
append { fflags |= O_APPEND as c_int; }
create { fflags |= O_CREAT as c_int; }
truncate { fflags |= O_TRUNC as c_int; }
no_flag { }
append => fflags |= O_APPEND as c_int,
create => fflags |= O_CREAT as c_int,
truncate => fflags |= O_TRUNC as c_int,
no_flag => ()
}
}
let fd = do os::as_c_charp(path) |pathbuf| {
@ -461,22 +461,22 @@ fn mk_file_writer(path: ~str, flags: ~[fileflag])
fn u64_to_le_bytes<T>(n: u64, size: uint, f: fn(v: &[u8]) -> T) -> T {
assert size <= 8u;
alt size {
1u { f(&[n as u8]) }
2u { f(&[n as u8,
(n >> 8) as u8]) }
4u { f(&[n as u8,
1u => f(&[n as u8]),
2u => f(&[n as u8,
(n >> 8) as u8]),
4u => f(&[n as u8,
(n >> 8) as u8,
(n >> 16) as u8,
(n >> 24) as u8]) }
8u { f(&[n as u8,
(n >> 24) as u8]),
8u => f(&[n as u8,
(n >> 8) as u8,
(n >> 16) as u8,
(n >> 24) as u8,
(n >> 32) as u8,
(n >> 40) as u8,
(n >> 48) as u8,
(n >> 56) as u8]) }
_ {
(n >> 56) as u8]),
_ => {
let mut bytes: ~[u8] = ~[], i = size, n = n;
while i > 0u {
@ -492,22 +492,22 @@ fn u64_to_le_bytes<T>(n: u64, size: uint, f: fn(v: &[u8]) -> T) -> T {
fn u64_to_be_bytes<T>(n: u64, size: uint, f: fn(v: &[u8]) -> T) -> T {
assert size <= 8u;
alt size {
1u { f(&[n as u8]) }
2u { f(&[(n >> 8) as u8,
n as u8]) }
4u { f(&[(n >> 24) as u8,
1u => f(&[n as u8]),
2u => f(&[(n >> 8) as u8,
n as u8]),
4u => f(&[(n >> 24) as u8,
(n >> 16) as u8,
(n >> 8) as u8,
n as u8]) }
8u { f(&[(n >> 56) as u8,
n as u8]),
8u => f(&[(n >> 56) as u8,
(n >> 48) as u8,
(n >> 40) as u8,
(n >> 32) as u8,
(n >> 24) as u8,
(n >> 16) as u8,
(n >> 8) as u8,
n as u8]) }
_ {
n as u8]),
_ => {
let mut bytes: ~[u8] = ~[];
let mut i = size;
while i > 0u {
@ -718,9 +718,9 @@ fn seek_in_buf(offset: int, pos: uint, len: uint, whence: seek_style) ->
let mut bpos = pos as int;
let blen = len as int;
alt whence {
seek_set { bpos = offset; }
seek_cur { bpos += offset; }
seek_end { bpos = blen + offset; }
seek_set => bpos = offset,
seek_cur => bpos += offset,
seek_end => bpos = blen + offset
}
if bpos < 0 { bpos = 0; } else if bpos > blen { bpos = blen; }
return bpos as uint;
@ -768,8 +768,8 @@ mod fsync {
new(-arg: arg<t>) { self.arg <- arg; }
drop {
alt self.arg.opt_level {
option::none { }
option::some(level) {
option::none => (),
option::some(level) => {
// fail hard if not succesful
assert(self.arg.fsync_fn(self.arg.val, level) != -1);
}
@ -892,30 +892,30 @@ mod tests {
#[test]
fn file_reader_not_exist() {
alt io::file_reader(~"not a file") {
result::err(e) {
result::err(e) => {
assert e == ~"error opening not a file";
}
result::ok(_) { fail; }
result::ok(_) => fail
}
}
#[test]
fn file_writer_bad_name() {
alt io::file_writer(~"?/?", ~[]) {
result::err(e) {
result::err(e) => {
assert str::starts_with(e, ~"error opening ?/?");
}
result::ok(_) { fail; }
result::ok(_) => fail
}
}
#[test]
fn buffered_file_writer_bad_name() {
alt io::buffered_file_writer(~"?/?") {
result::err(e) {
result::err(e) => {
assert e == ~"error opening ?/?";
}
result::ok(_) { fail; }
result::ok(_) => fail
}
}

View file

@ -2,14 +2,14 @@ type IMPL_T<A> = option<A>;
pure fn EACH<A>(self: IMPL_T<A>, f: fn(A) -> bool) {
alt self {
none { }
some(a) { f(a); }
none => (),
some(a) => { f(a); }
}
}
fn SIZE_HINT<A>(self: IMPL_T<A>) -> option<uint> {
alt self {
none { some(0u) }
some(_) { some(1u) }
none => some(0u),
some(_) => some(1u)
}
}

View file

@ -136,32 +136,32 @@ fn repeat(times: uint, blk: fn() -> bool) {
fn min<A:copy,IA:base_iter<A>>(self: IA) -> A {
alt do foldl::<A,option<A>,IA>(self, none) |a, b| {
alt a {
some(a_) if a_ < b {
some(a_) if a_ < b => {
// FIXME (#2005): Not sure if this is successfully optimized to
// a move
a
}
_ { some(b) }
_ => some(b)
}
} {
some(val) { val }
none { fail ~"min called on empty iterator" }
some(val) => val,
none => fail ~"min called on empty iterator"
}
}
fn max<A:copy,IA:base_iter<A>>(self: IA) -> A {
alt do foldl::<A,option<A>,IA>(self, none) |a, b| {
alt a {
some(a_) if a_ > b {
some(a_) if a_ > b => {
// FIXME (#2005): Not sure if this is successfully optimized to
// a move.
a
}
_ { some(b) }
_ => some(b)
}
} {
some(val) { val }
none { fail ~"max called on empty iterator" }
some(val) => val,
none => fail ~"max called on empty iterator"
}
}

View file

@ -23,7 +23,10 @@ pure fn get<T: copy>(opt: option<T>) -> T {
* Fails if the value equals `none`
*/
alt opt { some(x) { return x; } none { fail ~"option::get none"; } }
alt opt {
some(x) => return x,
none => fail ~"option::get none"
}
}
pure fn expect<T: copy>(opt: option<T>, reason: ~str) -> T {
@ -34,13 +37,13 @@ pure fn expect<T: copy>(opt: option<T>, reason: ~str) -> T {
Fails if the value equals `none`
"];
alt opt { some(x) { x } none { fail reason; } }
alt opt { some(x) => x, none => fail reason }
}
pure fn map<T, U>(opt: option<T>, f: fn(T) -> U) -> option<U> {
//! Maps a `some` value from one type to another
alt opt { some(x) { some(f(x)) } none { none } }
alt opt { some(x) => some(f(x)), none => none }
}
pure fn map_consume<T, U>(-opt: option<T>, f: fn(-T) -> U) -> option<U> {
@ -57,7 +60,7 @@ pure fn chain<T, U>(opt: option<T>, f: fn(T) -> option<U>) -> option<U> {
* function that returns an option.
*/
alt opt { some(x) { f(x) } none { none } }
alt opt { some(x) => f(x), none => none }
}
#[inline(always)]
@ -73,7 +76,7 @@ pure fn while_some<T>(+x: option<T>, blk: fn(+T) -> option<T>) {
pure fn is_none<T>(opt: option<T>) -> bool {
//! Returns true if the option equals `none`
alt opt { none { true } some(_) { false } }
alt opt { none => true, some(_) => false }
}
pure fn is_some<T>(opt: option<T>) -> bool {
@ -85,19 +88,19 @@ pure fn is_some<T>(opt: option<T>) -> bool {
pure fn get_default<T: copy>(opt: option<T>, def: T) -> T {
//! Returns the contained value or a default
alt opt { some(x) { x } none { def } }
alt opt { some(x) => x, none => def }
}
pure fn map_default<T, U>(opt: option<T>, +def: U, f: fn(T) -> U) -> U {
//! Applies a function to the contained value or returns a default
alt opt { none { def } some(t) { f(t) } }
alt opt { none => def, some(t) => f(t) }
}
pure fn iter<T>(opt: option<T>, f: fn(T)) {
//! Performs an operation on the contained value or does nothing
alt opt { none { } some(t) { f(t); } }
alt opt { none => (), some(t) => f(t) }
}
#[inline(always)]
@ -111,8 +114,8 @@ pure fn unwrap<T>(-opt: option<T>) -> T {
unsafe {
let addr = alt opt {
some(x) { ptr::addr_of(x) }
none { fail ~"option::unwrap none" }
some(x) => ptr::addr_of(x),
none => fail ~"option::unwrap none"
};
let liberated_value = unsafe::reinterpret_cast(*addr);
unsafe::forget(opt);

View file

@ -179,18 +179,16 @@ mod global_env {
do priv::weaken_task |weak_po| {
loop {
alt comm::select2(msg_po, weak_po) {
either::left(msg_getenv(n, resp_ch)) {
either::left(msg_getenv(n, resp_ch)) => {
comm::send(resp_ch, impl::getenv(n))
}
either::left(msg_setenv(n, v, resp_ch)) {
either::left(msg_setenv(n, v, resp_ch)) => {
comm::send(resp_ch, impl::setenv(n, v))
}
either::left(msg_env(resp_ch)) {
either::left(msg_env(resp_ch)) => {
comm::send(resp_ch, impl::env())
}
either::right(_) {
break;
}
either::right(_) => break
}
}
}
@ -286,8 +284,8 @@ fn fsync_fd(fd: c_int, level: io::fsync::level) -> c_int {
import libc::funcs::posix01::unistd::*;
alt level {
io::fsync::fsync
| io::fsync::fullfsync { return fsync(fd); }
io::fsync::fdatasync { return fdatasync(fd); }
| io::fsync::fullfsync => return fsync(fd),
io::fsync::fdatasync => return fdatasync(fd)
}
}
@ -297,8 +295,8 @@ fn fsync_fd(fd: c_int, level: io::fsync::level) -> c_int {
import libc::funcs::posix88::fcntl::*;
import libc::funcs::posix01::unistd::*;
alt level {
io::fsync::fsync { return fsync(fd); }
_ {
io::fsync::fsync => return fsync(fd),
_ => {
// According to man fnctl, the ok retval is only specified to be !=-1
if (fcntl(F_FULLFSYNC as c_int, fd) == -1 as c_int)
{ return -1 as c_int; }
@ -443,16 +441,12 @@ fn self_exe_path() -> option<path> {
*/
fn homedir() -> option<path> {
return alt getenv(~"HOME") {
some(p) {
if !str::is_empty(p) {
some(p)
} else {
secondary()
}
}
none {
secondary()
some(p) => if !str::is_empty(p) {
some(p)
} else {
secondary()
}
none => secondary()
};
#[cfg(unix)]

View file

@ -64,11 +64,11 @@ fn split_dirname_basename (pp: path) -> {dirname: ~str, basename: ~str} {
alt str::rfind(pp, |ch|
ch == consts::path_sep || ch == consts::alt_path_sep
) {
some(i) {
{dirname: str::slice(pp, 0u, i),
basename: str::slice(pp, i + 1u, str::len(pp))}
}
none { {dirname: ~".", basename: pp} }
some(i) => {
dirname: str::slice(pp, 0u, i),
basename: str::slice(pp, i + 1u, str::len(pp))
},
none => {dirname: ~".", basename: pp}
}
}

View file

@ -120,9 +120,9 @@ struct packet_header {
assert self.state != blocked || self.blocked_task != none;
self.blocked_task = none;
alt swap_state_acq(self.state, empty) {
empty | blocked { }
terminated { self.state = terminated; }
full { self.state = full; }
empty | blocked => (),
terminated => self.state = terminated,
full => self.state = full
}
}
@ -310,27 +310,25 @@ fn send<T: send, Tbuffer: send>(-p: send_packet_buffered<T, Tbuffer>,
p.payload <- some(payload);
let old_state = swap_state_rel(p.header.state, full);
alt old_state {
empty {
empty => {
// Yay, fastpath.
// The receiver will eventually clean this up.
//unsafe { forget(p); }
}
full { fail ~"duplicate send" }
blocked {
full => fail ~"duplicate send",
blocked => {
debug!{"waking up task for %?", p_};
alt p.header.blocked_task {
some(task) {
rustrt::task_signal_event(
task, ptr::addr_of(p.header) as *libc::c_void);
}
none { debug!{"just kidding!"} }
some(task) => rustrt::task_signal_event(
task, ptr::addr_of(p.header) as *libc::c_void),
none => debug!{"just kidding!"}
}
// The receiver will eventually clean this up.
//unsafe { forget(p); }
}
terminated {
terminated => {
// The receiver will never receive this. Rely on drop_glue
// to clean everything up.
}
@ -367,7 +365,7 @@ fn try_recv<T: send, Tbuffer: send>(-p: recv_packet_buffered<T, Tbuffer>)
let old_state = swap_state_acq(p.header.state,
blocked);
alt old_state {
empty {
empty => {
debug!{"no data available on %?, going to sleep.", p_};
if count == 0 {
wait_event(this);
@ -383,19 +381,17 @@ fn try_recv<T: send, Tbuffer: send>(-p: recv_packet_buffered<T, Tbuffer>)
}
debug!{"woke up, p.state = %?", copy p.header.state};
}
blocked {
if first {
fail ~"blocking on already blocked packet"
}
blocked => if first {
fail ~"blocking on already blocked packet"
}
full {
full => {
let mut payload = none;
payload <-> p.payload;
p.header.blocked_task = none;
p.header.state = empty;
return some(option::unwrap(payload))
}
terminated {
terminated => {
// This assert detects when we've accidentally unsafely
// casted too big of a number to a state.
assert old_state == terminated;
@ -409,9 +405,9 @@ fn try_recv<T: send, Tbuffer: send>(-p: recv_packet_buffered<T, Tbuffer>)
/// Returns true if messages are available.
pure fn peek<T: send, Tb: send>(p: recv_packet_buffered<T, Tb>) -> bool {
alt unsafe {(*p.header()).state} {
empty { false }
blocked { fail ~"peeking on blocked packet" }
full | terminated { true }
empty => false,
blocked => fail ~"peeking on blocked packet",
full | terminated => true
}
}
@ -425,11 +421,11 @@ impl peek<T: send, Tb: send> for recv_packet_buffered<T, Tb> {
fn sender_terminate<T: send>(p: *packet<T>) {
let p = unsafe { &*p };
alt swap_state_rel(p.header.state, terminated) {
empty {
empty => {
// The receiver will eventually clean up.
//unsafe { forget(p) }
}
blocked {
blocked => {
// wake up the target
alt p.header.blocked_task {
some(target) =>
@ -441,11 +437,11 @@ fn sender_terminate<T: send>(p: *packet<T>) {
// The receiver will eventually clean up.
//unsafe { forget(p) }
}
full {
full => {
// This is impossible
fail ~"you dun goofed"
}
terminated {
terminated => {
// I have to clean up, use drop_glue
}
}
@ -456,15 +452,15 @@ fn receiver_terminate<T: send>(p: *packet<T>) {
let p = unsafe { &*p };
assert p.header.blocked_task == none;
alt swap_state_rel(p.header.state, terminated) {
empty {
empty => {
// the sender will clean up
//unsafe { forget(p) }
}
blocked {
blocked => {
// this shouldn't happen.
fail ~"terminating a blocked packet"
}
terminated | full {
terminated | full => {
// I have to clean up, use drop_glue
}
}
@ -490,14 +486,14 @@ fn wait_many(pkts: &[*packet_header]) -> uint {
let p = unsafe { &*p };
let old = p.mark_blocked(this);
alt old {
full | terminated {
full | terminated => {
data_avail = true;
ready_packet = i;
(*p).state = old;
break;
}
blocked { fail ~"blocking on blocked packet" }
empty { }
blocked => fail ~"blocking on blocked packet",
empty => ()
}
}
@ -507,13 +503,11 @@ fn wait_many(pkts: &[*packet_header]) -> uint {
let pos = vec::position(pkts, |p| p == event);
alt pos {
some(i) {
some(i) => {
ready_packet = i;
data_avail = true;
}
none {
debug!{"ignoring spurious event, %?", event};
}
none => debug!{"ignoring spurious event, %?", event}
}
}
@ -569,9 +563,9 @@ fn select2<A: send, Ab: send, B: send, Bb: send>(
unsafe {
alt i {
0 { left((try_recv(a), b)) }
1 { right((a, try_recv(b))) }
_ { fail ~"select2 return an invalid packet" }
0 => left((try_recv(a), b)),
1 => right((a, try_recv(b))),
_ => fail ~"select2 return an invalid packet"
}
}
}
@ -586,9 +580,9 @@ fn selecti<T: selectable>(endpoints: &[T]) -> uint {
fn select2i<A: selectable, B: selectable>(a: A, b: B) -> either<(), ()> {
alt wait_many([a.header(), b.header()]/_) {
0 { left(()) }
1 { right(()) }
_ { fail ~"wait returned unexpected index" }
0 => left(()),
1 => right(()),
_ => fail ~"wait returned unexpected index"
}
}
@ -655,15 +649,13 @@ struct send_packet_buffered<T: send, Tbuffer: send> {
pure fn header() -> *packet_header {
alt self.p {
some(packet) {
unsafe {
let packet = &*packet;
let header = ptr::addr_of(packet.header);
//forget(packet);
header
}
some(packet) => unsafe {
let packet = &*packet;
let header = ptr::addr_of(packet.header);
//forget(packet);
header
}
none { fail ~"packet already consumed" }
none => fail ~"packet already consumed"
}
}
@ -718,15 +710,13 @@ struct recv_packet_buffered<T: send, Tbuffer: send> : selectable {
pure fn header() -> *packet_header {
alt self.p {
some(packet) {
unsafe {
let packet = &*packet;
let header = ptr::addr_of(packet.header);
//forget(packet);
header
}
some(packet) => unsafe {
let packet = &*packet;
let header = ptr::addr_of(packet.header);
//forget(packet);
header
}
none { fail ~"packet already consumed" }
none => fail ~"packet already consumed"
}
}
@ -847,11 +837,11 @@ impl port<T: send> of recv<T> for port<T> {
let mut endp = none;
endp <-> self.endp;
alt move pipes::try_recv(unwrap(endp)) {
some(streamp::data(x, endp)) {
some(streamp::data(x, endp)) => {
self.endp = some(move_it!{endp});
some(move_it!{x})
}
none { none }
none => none
}
}
@ -859,10 +849,8 @@ impl port<T: send> of recv<T> for port<T> {
let mut endp = none;
endp <-> self.endp;
let peek = alt endp {
some(endp) {
pipes::peek(endp)
}
none { fail ~"peeking empty stream" }
some(endp) => pipes::peek(endp),
none => fail ~"peeking empty stream"
};
self.endp <-> endp;
peek
@ -894,10 +882,10 @@ struct port_set<T: send> : recv<T> {
while result == none && ports.len() > 0 {
let i = wait_many(ports.map(|p| p.header()));
alt move ports[i].try_recv() {
some(copy m) {
some(copy m) => {
result = some(move m);
}
none {
none => {
// Remove this port.
let mut ports_ = ~[];
ports <-> ports_;
@ -914,8 +902,8 @@ struct port_set<T: send> : recv<T> {
fn recv() -> T {
match move self.try_recv() {
some(copy x) { move x }
none { fail ~"port_set: endpoints closed" }
some(copy x) => move x,
none => fail ~"port_set: endpoints closed"
}
}
@ -932,10 +920,8 @@ struct port_set<T: send> : recv<T> {
impl<T: send> of selectable for port<T> {
pure fn header() -> *packet_header unchecked {
alt self.endp {
some(endp) {
endp.header()
}
none { fail ~"peeking empty stream" }
some(endp) => endp.header(),
none => fail ~"peeking empty stream"
}
}
}
@ -968,22 +954,18 @@ impl<T: send, U: send, Left: selectable recv<T>, Right: selectable recv<U>>
fn select() -> either<T, U> {
alt self {
(lp, rp) {
alt select2i(lp, rp) {
left(()) { left (lp.recv()) }
right(()) { right(rp.recv()) }
}
(lp, rp) => alt select2i(lp, rp) {
left(()) => left (lp.recv()),
right(()) => right(rp.recv())
}
}
}
fn try_select() -> either<option<T>, option<U>> {
alt self {
(lp, rp) {
alt select2i(lp, rp) {
left(()) { left (lp.try_recv()) }
right(()) { right(rp.try_recv()) }
}
(lp, rp) => alt select2i(lp, rp) {
left(()) => left (lp.try_recv()),
right(()) => right(rp.try_recv())
}
}
}
@ -999,8 +981,8 @@ mod test {
c1.send(~"abc");
alt (p1, p2).select() {
right(_) { fail }
_ { }
right(_) => fail,
_ => ()
}
c2.send(123);

View file

@ -50,8 +50,8 @@ unsafe fn chan_from_global_ptr<T: send>(
// Wait to hear if we are the official instance of
// this global task
alt comm::recv::<msg>(setup_po) {
proceed { f(po); }
abort { }
proceed => f(po),
abort => ()
}
};

View file

@ -19,9 +19,9 @@ enum result<T, U> {
*/
pure fn get<T: copy, U>(res: result<T, U>) -> T {
alt res {
ok(t) { t }
err(the_err) {
unchecked{ fail fmt!{"get called on error result: %?", the_err}; }
ok(t) => t,
err(the_err) => unchecked {
fail fmt!{"get called on error result: %?", the_err}
}
}
}
@ -35,18 +35,16 @@ pure fn get<T: copy, U>(res: result<T, U>) -> T {
*/
pure fn get_err<T, U: copy>(res: result<T, U>) -> U {
alt res {
err(u) { u }
ok(_) {
fail ~"get_error called on ok result";
}
err(u) => u,
ok(_) => fail ~"get_error called on ok result"
}
}
/// Returns true if the result is `ok`
pure fn is_ok<T, U>(res: result<T, U>) -> bool {
alt res {
ok(_) { true }
err(_) { false }
ok(_) => true,
err(_) => false
}
}
@ -63,8 +61,8 @@ pure fn is_err<T, U>(res: result<T, U>) -> bool {
*/
pure fn to_either<T: copy, U: copy>(res: result<U, T>) -> either<T, U> {
alt res {
ok(res) { either::right(res) }
err(fail_) { either::left(fail_) }
ok(res) => either::right(res),
err(fail_) => either::left(fail_)
}
}
@ -85,8 +83,8 @@ pure fn to_either<T: copy, U: copy>(res: result<U, T>) -> either<T, U> {
fn chain<T, U: copy, V: copy>(res: result<T, V>, op: fn(T) -> result<U, V>)
-> result<U, V> {
alt res {
ok(t) { op(t) }
err(e) { err(e) }
ok(t) => op(t),
err(e) => err(e)
}
}
@ -103,8 +101,8 @@ fn chain_err<T: copy, U: copy, V: copy>(
op: fn(V) -> result<T, U>)
-> result<T, U> {
alt res {
ok(t) { ok(t) }
err(v) { op(v) }
ok(t) => ok(t),
err(v) => op(v)
}
}
@ -124,8 +122,8 @@ fn chain_err<T: copy, U: copy, V: copy>(
*/
fn iter<T, E>(res: result<T, E>, f: fn(T)) {
alt res {
ok(t) { f(t) }
err(_) { }
ok(t) => f(t),
err(_) => ()
}
}
@ -139,8 +137,8 @@ fn iter<T, E>(res: result<T, E>, f: fn(T)) {
*/
fn iter_err<T, E>(res: result<T, E>, f: fn(E)) {
alt res {
ok(_) { }
err(e) { f(e) }
ok(_) => (),
err(e) => f(e)
}
}
@ -161,8 +159,8 @@ fn iter_err<T, E>(res: result<T, E>, f: fn(E)) {
fn map<T, E: copy, U: copy>(res: result<T, E>, op: fn(T) -> U)
-> result<U, E> {
alt res {
ok(t) { ok(op(t)) }
err(e) { err(e) }
ok(t) => ok(op(t)),
err(e) => err(e)
}
}
@ -177,8 +175,8 @@ fn map<T, E: copy, U: copy>(res: result<T, E>, op: fn(T) -> U)
fn map_err<T: copy, E, F: copy>(res: result<T, E>, op: fn(E) -> F)
-> result<T, F> {
alt res {
ok(t) { ok(t) }
err(e) { err(op(e)) }
ok(t) => ok(t),
err(e) => err(op(e))
}
}
@ -189,15 +187,15 @@ impl extensions<T, E> for result<T, E> {
fn iter(f: fn(T)) {
alt self {
ok(t) { f(t) }
err(_) { }
ok(t) => f(t),
err(_) => ()
}
}
fn iter_err(f: fn(E)) {
alt self {
ok(_) { }
err(e) { f(e) }
ok(_) => (),
err(e) => f(e)
}
}
}
@ -207,8 +205,8 @@ impl extensions<T:copy, E> for result<T, E> {
fn map_err<F:copy>(op: fn(E) -> F) -> result<T,F> {
alt self {
ok(t) { ok(t) }
err(e) { err(op(e)) }
ok(t) => ok(t),
err(e) => err(op(e))
}
}
}
@ -218,8 +216,8 @@ impl extensions<T, E:copy> for result<T, E> {
fn map<U:copy>(op: fn(T) -> U) -> result<U,E> {
alt self {
ok(t) { ok(op(t)) }
err(e) { err(e) }
ok(t) => ok(op(t)),
err(e) => err(e)
}
}
}
@ -258,8 +256,8 @@ fn map_vec<T,U:copy,V:copy>(
vec::reserve(vs, vec::len(ts));
for vec::each(ts) |t| {
alt op(t) {
ok(v) { vec::push(vs, v); }
err(u) { return err(u); }
ok(v) => vec::push(vs, v),
err(u) => return err(u)
}
}
return ok(vs);
@ -269,12 +267,10 @@ fn map_opt<T,U:copy,V:copy>(
o_t: option<T>, op: fn(T) -> result<V,U>) -> result<option<V>,U> {
alt o_t {
none { ok(none) }
some(t) {
alt op(t) {
ok(v) { ok(some(v)) }
err(e) { err(e) }
}
none => ok(none),
some(t) => alt op(t) {
ok(v) => ok(some(v)),
err(e) => err(e)
}
}
}
@ -298,8 +294,8 @@ fn map_vec2<S,T,U:copy,V:copy>(ss: ~[S], ts: ~[T],
let mut i = 0u;
while i < n {
alt op(ss[i],ts[i]) {
ok(v) { vec::push(vs, v); }
err(u) { return err(u); }
ok(v) => vec::push(vs, v),
err(u) => return err(u)
}
i += 1u;
}
@ -319,8 +315,8 @@ fn iter_vec2<S,T,U:copy>(ss: ~[S], ts: ~[T],
let mut i = 0u;
while i < n {
alt op(ss[i],ts[i]) {
ok(()) { }
err(u) { return err(u); }
ok(()) => (),
err(u) => return err(u)
}
i += 1u;
}
@ -331,8 +327,8 @@ fn iter_vec2<S,T,U:copy>(ss: ~[S], ts: ~[T],
fn unwrap<T, U>(-res: result<T, U>) -> T {
unsafe {
let addr = alt res {
ok(x) { ptr::addr_of(x) }
err(_) { fail ~"error result" }
ok(x) => ptr::addr_of(x),
err(_) => fail ~"error result"
};
let liberated_value = unsafe::reinterpret_cast(*addr);
unsafe::forget(res);

View file

@ -97,7 +97,7 @@ fn with_envp<T>(env: option<~[(~str,~str)]>,
// On posixy systems we can pass a char** for envp, which is
// a null-terminated array of "k=v\n" strings.
alt env {
some(es) if !vec::is_empty(es) {
some(es) if !vec::is_empty(es) => {
let mut tmps = ~[];
let mut ptrs = ~[];
@ -112,9 +112,7 @@ fn with_envp<T>(env: option<~[(~str,~str)]>,
unsafe { cb(::unsafe::reinterpret_cast(p)) }
)
}
_ {
cb(ptr::null())
}
_ => cb(ptr::null())
}
}
@ -126,7 +124,7 @@ fn with_envp<T>(env: option<~[(~str,~str)]>,
// \0 to terminate.
unsafe {
alt env {
some(es) if !vec::is_empty(es) {
some(es) if !vec::is_empty(es) => {
let mut blk : ~[u8] = ~[];
for vec::each(es) |e| {
let (k,v) = e;
@ -138,9 +136,7 @@ fn with_envp<T>(env: option<~[(~str,~str)]>,
blk += ~[0_u8];
vec::as_buf(blk, |p, _len| cb(::unsafe::reinterpret_cast(p)))
}
_ {
cb(ptr::null())
}
_ => cb(ptr::null())
}
}
}
@ -148,8 +144,8 @@ fn with_envp<T>(env: option<~[(~str,~str)]>,
fn with_dirp<T>(d: option<~str>,
cb: fn(*libc::c_char) -> T) -> T {
alt d {
some(dir) { str::as_c_str(dir, cb) }
none { cb(ptr::null()) }
some(dir) => str::as_c_str(dir, cb),
none => cb(ptr::null())
}
}
@ -314,10 +310,10 @@ fn program_output(prog: ~str, args: ~[~str]) ->
while count > 0 {
let stream = comm::recv(p);
alt check stream {
(1, s) {
(1, s) => {
outs = s;
}
(2, s) {
(2, s) => {
errs = s;
}
};

View file

@ -116,14 +116,10 @@ mod linear {
let _ = for self.bucket_sequence(hash) |i| {
alt buckets[i] {
some(bkt) {
if bkt.hash == hash && self.eqfn(k, &bkt.key) {
return found_entry(i);
}
}
none => {
return found_hole(i);
some(bkt) => if bkt.hash == hash && self.eqfn(k, &bkt.key) {
return found_entry(i);
}
none => return found_hole(i)
}
};
return table_full;
@ -162,7 +158,7 @@ mod linear {
alt self.bucket_for_key_with_hash(self.buckets, hash,
unsafe{borrow(k)}) {
table_full => {fail ~"Internal logic error";}
found_hole(idx) {
found_hole(idx) => {
debug!{"insert fresh (%?->%?) at idx %?, hash %?",
k, v, idx, hash};
self.buckets[idx] = some({hash: hash, key: k, value: v});

View file

@ -342,18 +342,16 @@ fn unshift_char(&s: ~str, ch: char) { s = from_char(ch) + s; }
/// Returns a string with leading whitespace removed
pure fn trim_left(s: &str) -> ~str {
alt find(s, |c| !char::is_whitespace(c)) {
none { ~"" }
some(first) {
unsafe { unsafe::slice_bytes(s, first, len(s)) }
}
none => ~"",
some(first) => unsafe { unsafe::slice_bytes(s, first, len(s)) }
}
}
/// Returns a string with trailing whitespace removed
pure fn trim_right(s: &str) -> ~str {
alt rfind(s, |c| !char::is_whitespace(c)) {
none { ~"" }
some(last) {
none => ~"",
some(last) => {
let {next, _} = char_range_at(s, last);
unsafe { unsafe::slice_bytes(s, 0u, next) }
}
@ -2779,9 +2777,9 @@ mod tests {
let mut i = 0;
do chars_iter(~"x\u03c0y") |ch| {
alt check i {
0 { assert ch == 'x'; }
1 { assert ch == '\u03c0'; }
2 { assert ch == 'y'; }
0 => assert ch == 'x',
1 => assert ch == '\u03c0',
2 => assert ch == 'y'
}
i += 1;
}
@ -2795,9 +2793,9 @@ mod tests {
do bytes_iter(~"xyz") |bb| {
alt check i {
0 { assert bb == 'x' as u8; }
1 { assert bb == 'y' as u8; }
2 { assert bb == 'z' as u8; }
0 => assert bb == 'x' as u8,
1 => assert bb == 'y' as u8,
2 => assert bb == 'z' as u8
}
i += 1;
}
@ -2813,11 +2811,11 @@ mod tests {
do split_char_iter(data, ' ') |xx| {
alt ii {
0 { assert ~"\nMary" == xx; }
1 { assert ~"had" == xx; }
2 { assert ~"a" == xx; }
3 { assert ~"little" == xx; }
_ { () }
0 => assert ~"\nMary" == xx,
1 => assert ~"had" == xx,
2 => assert ~"a" == xx,
3 => assert ~"little" == xx,
_ => ()
}
ii += 1;
}
@ -2831,10 +2829,10 @@ mod tests {
do splitn_char_iter(data, ' ', 2u) |xx| {
alt ii {
0 { assert ~"\nMary" == xx; }
1 { assert ~"had" == xx; }
2 { assert ~"a little lamb\nLittle lamb\n" == xx; }
_ { () }
0 => assert ~"\nMary" == xx,
1 => assert ~"had" == xx,
2 => assert ~"a little lamb\nLittle lamb\n" == xx,
_ => ()
}
ii += 1;
}
@ -2848,11 +2846,11 @@ mod tests {
do words_iter(data) |ww| {
alt ii {
0 { assert ~"Mary" == ww; }
1 { assert ~"had" == ww; }
2 { assert ~"a" == ww; }
3 { assert ~"little" == ww; }
_ { () }
0 => assert ~"Mary" == ww,
1 => assert ~"had" == ww,
2 => assert ~"a" == ww,
3 => assert ~"little" == ww,
_ => ()
}
ii += 1;
}
@ -2868,11 +2866,11 @@ mod tests {
do lines_iter(lf) |x| {
alt ii {
0 { assert ~"" == x; }
1 { assert ~"Mary had a little lamb" == x; }
2 { assert ~"Little lamb" == x; }
3 { assert ~"" == x; }
_ { () }
0 => assert ~"" == x,
1 => assert ~"Mary had a little lamb" == x,
2 => assert ~"Little lamb" == x,
3 => assert ~"" == x,
_ => ()
}
ii += 1;
}

View file

@ -280,7 +280,7 @@ impl task_builder for task_builder {
blk(do future::from_fn {
alt comm::recv(po) {
exit(_, result) { result }
exit(_, result) => result
}
});
@ -503,8 +503,8 @@ fn try<T:send>(+f: fn~() -> T) -> result<T,()> {
comm::send(ch, f());
}
alt future::get(option::unwrap(result)) {
success { result::ok(comm::recv(po)) }
failure { result::err(()) }
success => result::ok(comm::recv(po)),
failure => result::err(())
}
}
@ -992,7 +992,7 @@ fn gen_child_taskgroup(linked: bool, supervised: bool)
* Step 1. Get spawner's taskgroup info.
*######################################################################*/
let spawner_group = alt unsafe { local_get(spawner, taskgroup_key()) } {
none {
none => {
// Main task, doing first spawn ever. Lazily initialise here.
let mut members = new_taskset();
taskset_insert(&mut members, spawner);
@ -1005,7 +1005,7 @@ fn gen_child_taskgroup(linked: bool, supervised: bool)
unsafe { local_set(spawner, taskgroup_key(), group); }
group
}
some(group) { group }
some(group) => group
};
/*######################################################################*
* Step 2. Process spawn options for child.
@ -1029,8 +1029,8 @@ fn gen_child_taskgroup(linked: bool, supervised: bool)
// it should be enabled only in debug builds.
let new_generation =
alt *old_ancestors {
some(arc) { access_ancestors(arc, |a| a.generation+1) }
none { 0 } // the actual value doesn't really matter.
some(arc) => access_ancestors(arc, |a| a.generation+1),
none => 0 // the actual value doesn't really matter.
};
assert new_generation < uint::max_value;
// Build a new node in the ancestor list.
@ -1074,8 +1074,8 @@ fn spawn_raw(opts: task_opts, +f: fn~()) {
let (child_tg, ancestors, f) = option::swap_unwrap(child_data);
// Create child task.
let new_task = alt opts.sched {
none { rustrt::new_task() }
some(sched_opts) { new_task_in_new_sched(sched_opts) }
none => rustrt::new_task(),
some(sched_opts) => new_task_in_new_sched(sched_opts)
};
assert !new_task.is_null();
// Getting killed after here would leak the task.
@ -1163,20 +1163,20 @@ fn spawn_raw(opts: task_opts, +f: fn~()) {
}
let num_threads = alt opts.mode {
single_threaded { 1u }
thread_per_core {
single_threaded => 1u,
thread_per_core => {
fail ~"thread_per_core scheduling mode unimplemented"
}
thread_per_task {
thread_per_task => {
fail ~"thread_per_task scheduling mode unimplemented"
}
manual_threads(threads) {
manual_threads(threads) => {
if threads == 0u {
fail ~"can not create a scheduler with no threads";
}
threads
}
osmain { 0u /* Won't be used */ }
osmain => 0u /* Won't be used */
};
let sched_id = if opts.mode != osmain {
@ -1273,7 +1273,10 @@ unsafe fn local_data_lookup<T: owned>(
let key_value = key_to_key_value(key);
let map_pos = (*map).position(|entry|
alt entry { some((k,_,_)) { k == key_value } none { false } }
alt entry {
some((k,_,_)) => k == key_value,
none => false
}
);
do map_pos.map |index| {
// .get() is guaranteed because of "none { false }" above.
@ -1334,20 +1337,16 @@ unsafe fn local_set<T: owned>(
let new_entry = some((keyval, data_ptr, data_box));
// Find a place to put it.
alt local_data_lookup(map, key) {
some((index, _old_data_ptr)) {
some((index, _old_data_ptr)) => {
// Key already had a value set, _old_data_ptr, whose reference
// will get dropped when the local_data box is overwritten.
(*map).set_elt(index, new_entry);
}
none {
none => {
// Find an empty slot. If not, grow the vector.
alt (*map).position(|x| x == none) {
some(empty_index) {
(*map).set_elt(empty_index, new_entry);
}
none {
(*map).push(new_entry);
}
some(empty_index) => (*map).set_elt(empty_index, new_entry),
none => (*map).push(new_entry)
}
}
}
@ -1698,8 +1697,8 @@ fn test_try_success() {
alt do try {
~"Success!"
} {
result::ok(~"Success!") { }
_ { fail; }
result::ok(~"Success!") => (),
_ => fail
}
}
@ -1709,8 +1708,8 @@ fn test_try_fail() {
alt do try {
fail
} {
result::err(()) { }
result::ok(()) { fail; }
result::err(()) => (),
result::ok(()) => fail
}
}
@ -2054,15 +2053,15 @@ fn test_tls_modify() unsafe {
fn my_key(+_x: @~str) { }
local_data_modify(my_key, |data| {
alt data {
some(@val) { fail ~"unwelcome value: " + val }
none { some(@~"first data") }
some(@val) => fail ~"unwelcome value: " + val,
none => some(@~"first data")
}
});
local_data_modify(my_key, |data| {
alt data {
some(@~"first data") { some(@~"next data") }
some(@val) { fail ~"wrong value: " + val }
none { fail ~"missing value" }
some(@~"first data") => some(@~"next data"),
some(@val) => fail ~"wrong value: " + val,
none => fail ~"missing value"
}
});
assert *(local_data_pop(my_key).get()) == ~"next data";

View file

@ -128,8 +128,8 @@ fn parse_buf(buf: ~[u8], radix: uint) -> option<T> {
let mut n = 0u as T;
loop {
alt char::to_digit(buf[i] as char, radix) {
some(d) { n += d as T * power; }
none { return none; }
some(d) => n += d as T * power,
none => return none
}
power *= radix as T;
if i == 0u { return some(n); }
@ -147,8 +147,8 @@ fn from_str_radix(buf: ~str, radix: u64) -> option<u64> {
let mut power = 1u64, n = 0u64;
loop {
alt char::to_digit(buf[i] as char, radix as uint) {
some(d) { n += d as u64 * power; }
none { return none; }
some(d) => n += d as u64 * power,
none => return none
}
power *= radix;
if i == 0u { return some(n); }

View file

@ -2,9 +2,8 @@ mod general_category {
pure fn Cc(c: char) -> bool {
return alt c {
'\x00' to '\x1f'
| '\x7f' to '\x9f'
{ true }
_ { false }
| '\x7f' to '\x9f' => true,
_ => false
};
}
@ -22,25 +21,22 @@ mod general_category {
| '\ufff9' to '\ufffb'
| '\U000110bd'
| '\U0001d173' to '\U0001d17a'
| '\U000e0001' to '\U000e007f'
{ true }
_ { false }
| '\U000e0001' to '\U000e007f' => true,
_ => false
};
}
pure fn Co(c: char) -> bool {
return alt c {
'\ue000' to '\uf8ff'
{ true }
_ { false }
'\ue000' to '\uf8ff' => true,
_ => false
};
}
pure fn Cs(c: char) -> bool {
return alt c {
'\ud800' to '\udfff'
{ true }
_ { false }
'\ud800' to '\udfff' => true,
_ => false
};
}
@ -644,8 +640,8 @@ mod general_category {
| '\U0001d7aa' to '\U0001d7c2'
| '\U0001d7c4' to '\U0001d7c9'
| '\U0001d7cb'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -700,8 +696,8 @@ mod general_category {
| '\uaadd'
| '\uff70'
| '\uff9e' to '\uff9f'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -886,8 +882,8 @@ mod general_category {
| '\U00012000' to '\U0001236e'
| '\U00013000' to '\U0001b001'
| '\U00020000' to '\U0002fa1d'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -903,8 +899,8 @@ mod general_category {
| '\u1fbc'
| '\u1fcc'
| '\u1ffc'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1495,8 +1491,8 @@ mod general_category {
| '\U0001d756' to '\U0001d76e'
| '\U0001d790' to '\U0001d7a8'
| '\U0001d7ca'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1606,8 +1602,8 @@ mod general_category {
| '\U000110b7' to '\U000110b8'
| '\U0001d165' to '\U0001d166'
| '\U0001d16d' to '\U0001d172'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1617,8 +1613,8 @@ mod general_category {
| '\u20dd' to '\u20e0'
| '\u20e2' to '\u20e4'
| '\ua670' to '\ua672'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1810,8 +1806,8 @@ mod general_category {
| '\U0001d1aa' to '\U0001d1ad'
| '\U0001d242' to '\U0001d244'
| '\U000e0100' to '\U000e01ef'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1854,8 +1850,8 @@ mod general_category {
| '\U000104a0' to '\U000104a9'
| '\U00011066' to '\U0001106f'
| '\U0001d7ce' to '\U0001d7ff'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1873,8 +1869,8 @@ mod general_category {
| '\U0001034a'
| '\U000103d1' to '\U000103d5'
| '\U00012400' to '\U00012462'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1921,8 +1917,8 @@ mod general_category {
| '\U00011052' to '\U00011065'
| '\U0001d360' to '\U0001d371'
| '\U0001f100' to '\U0001f10a'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1934,8 +1930,8 @@ mod general_category {
| '\ufe33' to '\ufe34'
| '\ufe4d' to '\ufe4f'
| '\uff3f'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -1956,8 +1952,8 @@ mod general_category {
| '\ufe58'
| '\ufe63'
| '\uff0d'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2033,8 +2029,8 @@ mod general_category {
| '\uff5d'
| '\uff60'
| '\uff63'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2050,8 +2046,8 @@ mod general_category {
| '\u2e0d'
| '\u2e1d'
| '\u2e21'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2068,8 +2064,8 @@ mod general_category {
| '\u2e0c'
| '\u2e1c'
| '\u2e20'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2201,8 +2197,8 @@ mod general_category {
| '\U000110bb' to '\U000110bc'
| '\U000110be' to '\U000110c1'
| '\U00012470' to '\U00012473'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2280,8 +2276,8 @@ mod general_category {
| '\uff5b'
| '\uff5f'
| '\uff62'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2303,8 +2299,8 @@ mod general_category {
| '\uff04'
| '\uffe0' to '\uffe1'
| '\uffe5' to '\uffe6'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2337,8 +2333,8 @@ mod general_category {
| '\uff3e'
| '\uff40'
| '\uffe3'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2408,8 +2404,8 @@ mod general_category {
| '\U0001d789'
| '\U0001d7a9'
| '\U0001d7c3'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -2527,24 +2523,22 @@ mod general_category {
| '\U0001d245' to '\U0001d356'
| '\U0001f000' to '\U0001f0df'
| '\U0001f110' to '\U0001f773'
{ true }
_ { false }
=> true,
_ => false
};
}
pure fn Zl(c: char) -> bool {
return alt c {
'\u2028'
{ true }
_ { false }
'\u2028' => true,
_ => false
};
}
pure fn Zp(c: char) -> bool {
return alt c {
'\u2029'
{ true }
_ { false }
'\u2029' => true,
_ => false
};
}
@ -2558,8 +2552,8 @@ mod general_category {
| '\u202f'
| '\u205f'
| '\u3000'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -3299,8 +3293,8 @@ mod derived_property {
| '\U0002a700' to '\U0002b734'
| '\U0002b740' to '\U0002b81d'
| '\U0002f800' to '\U0002fa1d'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -4170,8 +4164,8 @@ mod derived_property {
| '\U0002b740' to '\U0002b81d'
| '\U0002f800' to '\U0002fa1d'
| '\U000e0100' to '\U000e01ef'
{ true }
_ { false }
=> true,
_ => false
};
}
@ -4676,8 +4670,8 @@ mod derived_property {
| '\U0002a700' to '\U0002b734'
| '\U0002b740' to '\U0002b81d'
| '\U0002f800' to '\U0002fa1d'
{ true }
_ { false }
=> true,
_ => false
};
}

View file

@ -357,8 +357,8 @@ fn split<T: copy>(v: &[T], f: fn(T) -> bool) -> ~[~[T]] {
let mut result = ~[];
while start < ln {
alt position_between(v, start, ln, f) {
none { break }
some(i) {
none => break,
some(i) => {
push(result, slice(v, start, i));
start = i + 1u;
}
@ -381,8 +381,8 @@ fn splitn<T: copy>(v: &[T], n: uint, f: fn(T) -> bool) -> ~[~[T]] {
let mut result = ~[];
while start < ln && count > 0u {
alt position_between(v, start, ln, f) {
none { break }
some(i) {
none => break,
some(i) => {
push(result, slice(v, start, i));
// Make sure to skip the separator.
start = i + 1u;
@ -406,8 +406,8 @@ fn rsplit<T: copy>(v: &[T], f: fn(T) -> bool) -> ~[~[T]] {
let mut result = ~[];
while end > 0u {
alt rposition_between(v, 0u, end, f) {
none { break }
some(i) {
none => break,
some(i) => {
push(result, slice(v, i + 1u, end));
end = i;
}
@ -430,8 +430,8 @@ fn rsplitn<T: copy>(v: &[T], n: uint, f: fn(T) -> bool) -> ~[~[T]] {
let mut result = ~[];
while end > 0u && count > 0u {
alt rposition_between(v, 0u, end, f) {
none { break }
some(i) {
none => break,
some(i) => {
push(result, slice(v, i + 1u, end));
// Make sure to skip the separator.
end = i;
@ -714,8 +714,8 @@ pure fn filter_map<T, U: copy>(v: &[T], f: fn(T) -> option<U>)
let mut result = ~[];
for each(v) |elem| {
alt f(elem) {
none {/* no-op */ }
some(result_elem) { unsafe { push(result, result_elem); } }
none => {/* no-op */ }
some(result_elem) => unsafe { push(result, result_elem); }
}
}
return result;

View file

@ -31,15 +31,15 @@ impl of to_base64 for ~[u8] {
}
alt check len % 3u {
0u { }
1u {
0u => (),
1u => {
let n = (self[i] as uint) << 16u;
str::push_char(s, chars[(n >> 18u) & 63u]);
str::push_char(s, chars[(n >> 12u) & 63u]);
str::push_char(s, '=');
str::push_char(s, '=');
}
2u {
2u => {
let n = (self[i] as uint) << 16u | (self[i + 1u] as uint) << 8u;
str::push_char(s, chars[(n >> 18u) & 63u]);
str::push_char(s, chars[(n >> 12u) & 63u]);
@ -97,18 +97,16 @@ impl of from_base64 for ~[u8] {
n |= 0x3Fu;
} else if ch == '=' {
alt len - i {
1u {
1u => {
vec::push(r, ((n >> 16u) & 0xFFu) as u8);
vec::push(r, ((n >> 8u ) & 0xFFu) as u8);
return copy r;
}
2u {
2u => {
vec::push(r, ((n >> 10u) & 0xFFu) as u8);
return copy r;
}
_ {
fail ~"invalid base64 padding";
}
_ => fail ~"invalid base64 padding"
}
} else {
fail ~"invalid base64 character";

View file

@ -180,35 +180,25 @@ class bitv {
self.die();
}
alt self.rep {
small(s) {
alt other.rep {
small(s1) {
alt op {
union { s.union(s1) }
intersect { s.intersect(s1) }
assign { s.become(s1) }
difference { s.difference(s1) }
}
}
big(s1) {
self.die();
}
small(s) => alt other.rep {
small(s1) => alt op {
union => s.union(s1),
intersect => s.intersect(s1),
assign => s.become(s1),
difference => s.difference(s1)
}
big(s1) => self.die()
}
big(s) => alt other.rep {
small(_) => self.die(),
big(s1) => alt op {
union => s.union(s1),
intersect => s.intersect(s1),
assign => s.become(s1),
difference => s.difference(s1)
}
}
}
big(s) {
alt other.rep {
small(_) { self.die(); }
big(s1) {
alt op {
union { s.union(s1) }
intersect { s.intersect(s1) }
assign { s.become(s1) }
difference { s.difference(s1) }
}
}
}
}
}
}
}
@ -243,10 +233,10 @@ class bitv {
#[inline(always)]
fn clone() -> ~bitv {
~alt self.rep {
small(b) {
small(b) => {
bitv{nbits: self.nbits, rep: small(~small_bitv{bits: b.bits})}
}
big(b) {
big(b) => {
let st = to_mut(from_elem(self.nbits / uint_bits + 1, 0));
let len = st.len();
for uint::range(0, len) |i| { st[i] = b.storage[i]; };
@ -260,8 +250,8 @@ class bitv {
pure fn get(i: uint) -> bool {
assert (i < self.nbits);
alt self.rep {
big(b) { b.get(i) }
small(s) { s.get(i) }
big(b) => b.get(i),
small(s) => s.get(i)
}
}
@ -274,8 +264,8 @@ class bitv {
fn set(i: uint, x: bool) {
assert (i < self.nbits);
alt self.rep {
big(b) { b.set(i, x); }
small(s) { s.set(i, x); }
big(b) => b.set(i, x),
small(s) => s.set(i, x)
}
}
@ -289,19 +279,13 @@ class bitv {
fn equal(v1: bitv) -> bool {
if self.nbits != v1.nbits { return false; }
alt self.rep {
small(b) {
alt v1.rep {
small(b1) { b.equals(b1) }
_ { false }
}
small(b) => alt v1.rep {
small(b1) => b.equals(b1),
_ => false
}
big(s) {
alt v1.rep {
big(s1) {
s.equals(s1)
}
small(_) { return false; }
}
big(s) => alt v1.rep {
big(s1) => s.equals(s1),
small(_) => return false
}
}
}
@ -310,10 +294,8 @@ class bitv {
#[inline(always)]
fn clear() {
alt self.rep {
small(b) { b.clear(); }
big(s) {
for s.each_storage() |w| { w = 0u }
}
small(b) => b.clear(),
big(s) => for s.each_storage() |w| { w = 0u }
}
}
@ -321,20 +303,16 @@ class bitv {
#[inline(always)]
fn set_all() {
alt self.rep {
small(b) { b.set_all(); }
big(s) {
for s.each_storage() |w| { w = !0u } }
}
small(b) => b.set_all(),
big(s) => for s.each_storage() |w| { w = !0u } }
}
/// Invert all bits
#[inline(always)]
fn invert() {
alt self.rep {
small(b) { b.invert(); }
big(s) {
for s.each_storage() |w| { w = !w } }
}
small(b) => b.invert(),
big(s) => for s.each_storage() |w| { w = !w } }
}
/**
@ -352,8 +330,8 @@ class bitv {
#[inline(always)]
fn is_true() -> bool {
alt self.rep {
small(b) { b.is_true() }
_ {
small(b) => b.is_true(),
_ => {
for self.each() |i| { if !i { return false; } }
true
}
@ -373,8 +351,8 @@ class bitv {
fn is_false() -> bool {
alt self.rep {
small(b) { b.is_false() }
big(_) {
small(b) => b.is_false(),
big(_) => {
for self.each() |i| { if i { return false; } }
true
}

View file

@ -47,8 +47,8 @@ class dtor_res {
new(dtor: option<fn@()>) { self.dtor = dtor; }
drop {
alt self.dtor {
option::none { }
option::some(f) { f(); }
option::none => (),
option::some(f) => f()
}
}
}

View file

@ -41,7 +41,7 @@ fn create<T: copy>() -> t<T> {
return rv;
}
fn get<T: copy>(elts: dvec<cell<T>>, i: uint) -> T {
alt elts.get_elt(i) { some(t) { t } _ { fail } }
alt elts.get_elt(i) { some(t) => t, _ => fail }
}
type repr<T> = {mut nelts: uint,
@ -239,45 +239,35 @@ mod tests {
fn intboxeq(&&a: @int, &&b: @int) -> bool { return a == b; }
fn taggyeq(a: taggy, b: taggy) -> bool {
alt a {
one(a1) {
alt b { one(b1) {return a1 == b1; } _ { return false; } }
one(a1) => alt b {
one(b1) => return a1 == b1,
_ => return false
}
two(a1, a2) {
alt b {
two(b1, b2) { return a1 == b1 && a2 == b2; }
_ { return false; }
}
two(a1, a2) => alt b {
two(b1, b2) => return a1 == b1 && a2 == b2,
_ => return false
}
three(a1, a2, a3) {
alt b {
three(b1, b2, b3) {
return a1 == b1 && a2 == b2 && a3 == b3;
}
_ { return false; }
}
three(a1, a2, a3) => alt b {
three(b1, b2, b3) => return a1 == b1 && a2 == b2 && a3 == b3,
_ => return false
}
}
}
fn taggypareq<T>(a: taggypar<T>, b: taggypar<T>) -> bool {
alt a {
onepar::<T>(a1) {
alt b {
onepar::<T>(b1) { return a1 == b1; } _ { return false; }
}
onepar::<T>(a1) => alt b {
onepar::<T>(b1) => return a1 == b1,
_ => return false
}
twopar::<T>(a1, a2) {
alt b {
twopar::<T>(b1, b2) { return a1 == b1 && a2 == b2; }
_ { return false; }
}
twopar::<T>(a1, a2) => alt b {
twopar::<T>(b1, b2) => return a1 == b1 && a2 == b2,
_ => return false
}
threepar::<T>(a1, a2, a3) {
alt b {
threepar::<T>(b1, b2, b3) {
return a1 == b1 && a2 == b2 && a3 == b3;
}
_ { return false; }
threepar::<T>(a1, a2, a3) => alt b {
threepar::<T>(b1, b2, b3) => {
return a1 == b1 && a2 == b2 && a3 == b3
}
_ => return false
}
}
}

View file

@ -114,8 +114,8 @@ fn maybe_get_doc(d: doc, tg: uint) -> option<doc> {
fn get_doc(d: doc, tg: uint) -> doc {
alt maybe_get_doc(d, tg) {
some(d) { return d; }
none {
some(d) => return d,
none => {
error!{"failed to find block with tag %u", tg};
fail;
}
@ -190,21 +190,13 @@ enum writer {
fn write_sized_vuint(w: io::writer, n: uint, size: uint) {
alt size {
1u {
w.write(&[0x80u8 | (n as u8)]);
}
2u {
w.write(&[0x40u8 | ((n >> 8_u) as u8), n as u8]);
}
3u {
w.write(&[0x20u8 | ((n >> 16_u) as u8), (n >> 8_u) as u8,
n as u8]);
}
4u {
w.write(&[0x10u8 | ((n >> 24_u) as u8), (n >> 16_u) as u8,
(n >> 8_u) as u8, n as u8]);
}
_ { fail fmt!{"vint to write too big: %?", n}; }
1u => w.write(&[0x80u8 | (n as u8)]),
2u => w.write(&[0x40u8 | ((n >> 8_u) as u8), n as u8]),
3u => w.write(&[0x20u8 | ((n >> 16_u) as u8), (n >> 8_u) as u8,
n as u8]),
4u => w.write(&[0x10u8 | ((n >> 24_u) as u8), (n >> 16_u) as u8,
(n >> 8_u) as u8, n as u8]),
_ => fail fmt!{"vint to write too big: %?", n}
};
}
@ -602,10 +594,9 @@ fn test_option_int() {
fn serialize_0<S: serialization::serializer>(s: S, v: option<int>) {
do s.emit_enum(~"core::option::t") {
alt v {
none {
s.emit_enum_variant(~"core::option::none", 0u, 0u, || { } );
}
some(v0) {
none => s.emit_enum_variant(
~"core::option::none", 0u, 0u, || { } ),
some(v0) => {
do s.emit_enum_variant(~"core::option::some", 1u, 1u) {
s.emit_enum_variant_arg(0u, || serialize_1(s, v0));
}
@ -622,8 +613,8 @@ fn test_option_int() {
do s.read_enum(~"core::option::t") {
do s.read_enum_variant |i| {
alt check i {
0u { none }
1u {
0u => none,
1u => {
let v0 = do s.read_enum_variant_arg(0u) {
deserialize_1(s)
};

View file

@ -31,8 +31,8 @@ fn init<K, V>() -> treemap<K, V> { @empty }
/// Insert a value into the map
fn insert<K: copy, V: copy>(m: treemap<K, V>, k: K, v: V) -> treemap<K, V> {
@alt m {
@empty { node(@k, @v, @empty, @empty) }
@node(@kk, vv, left, right) {
@empty => node(@k, @v, @empty, @empty),
@node(@kk, vv, left, right) => {
if k < kk {
node(@kk, vv, insert(left, k, v), right)
} else if k == kk {
@ -45,8 +45,8 @@ fn insert<K: copy, V: copy>(m: treemap<K, V>, k: K, v: V) -> treemap<K, V> {
/// Find a value based on the key
fn find<K, V: copy>(m: treemap<K, V>, k: K) -> option<V> {
alt *m {
empty { none }
node(@kk, @v, left, right) {
empty => none,
node(@kk, @v, left, right) => {
if k == kk {
some(v)
} else if k < kk { find(left, k) } else { find(right, k) }
@ -57,13 +57,13 @@ fn find<K, V: copy>(m: treemap<K, V>, k: K) -> option<V> {
/// Visit all pairs in the map in order.
fn traverse<K, V: copy>(m: treemap<K, V>, f: fn(K, V)) {
alt *m {
empty { }
empty => (),
/*
Previously, this had what looked like redundant
matches to me, so I changed it. but that may be a
de-optimization -- tjc
*/
node(@k, @v, left, right) {
node(@k, @v, left, right) => {
// copy v to make aliases work out
let v1 = v;
traverse(left, f);

View file

@ -140,7 +140,10 @@ fn is_arg(arg: ~str) -> bool {
}
fn name_str(nm: name) -> ~str {
return alt nm { short(ch) { str::from_char(ch) } long(s) { s } };
return alt nm {
short(ch) => str::from_char(ch),
long(s) => s
};
}
fn find_opt(opts: ~[opt], nm: name) -> option<uint> {
@ -162,18 +165,14 @@ enum fail_ {
/// Convert a `fail_` enum into an error string
fn fail_str(f: fail_) -> ~str {
return alt f {
argument_missing(nm) {
~"Argument to option '" + nm + ~"' missing."
}
unrecognized_option(nm) { ~"Unrecognized option: '" + nm + ~"'." }
option_missing(nm) { ~"Required option '" + nm + ~"' missing." }
option_duplicated(nm) {
~"Option '" + nm + ~"' given more than once."
}
unexpected_argument(nm) {
~"Option " + nm + ~" does not take an argument."
}
};
argument_missing(nm) => ~"Argument to option '" + nm + ~"' missing.",
unrecognized_option(nm) => ~"Unrecognized option: '" + nm + ~"'.",
option_missing(nm) => ~"Required option '" + nm + ~"' missing.",
option_duplicated(nm) => ~"Option '" + nm + ~"' given more than once.",
unexpected_argument(nm) => {
~"Option " + nm + ~" does not take an argument."
}
};
}
/**
@ -235,15 +234,13 @@ fn getopts(args: ~[~str], opts: ~[opt]) -> result unsafe {
*/
alt find_opt(opts, opt) {
some(id) {
last_valid_opt_id = option::some(id);
}
none {
some(id) => last_valid_opt_id = option::some(id),
none => {
let arg_follows =
option::is_some(last_valid_opt_id) &&
alt opts[option::get(last_valid_opt_id)].hasarg {
yes | maybe { true }
no { false }
yes | maybe => true,
no => false
};
if arg_follows && j + 1 < curlen {
i_arg = option::some(str::slice(cur, j, curlen));
@ -261,17 +258,17 @@ fn getopts(args: ~[~str], opts: ~[opt]) -> result unsafe {
for vec::each(names) |nm| {
name_pos += 1u;
let optid = alt find_opt(opts, nm) {
some(id) { id }
none { return err(unrecognized_option(name_str(nm))); }
some(id) => id,
none => return err(unrecognized_option(name_str(nm)))
};
alt opts[optid].hasarg {
no {
no => {
if !option::is_none::<~str>(i_arg) {
return err(unexpected_argument(name_str(nm)));
}
vec::push(vals[optid], given);
}
maybe {
maybe => {
if !option::is_none::<~str>(i_arg) {
vec::push(vals[optid], val(option::get(i_arg)));
} else if name_pos < vec::len::<name>(names) ||
@ -279,7 +276,7 @@ fn getopts(args: ~[~str], opts: ~[opt]) -> result unsafe {
vec::push(vals[optid], given);
} else { i += 1u; vec::push(vals[optid], val(args[i])); }
}
yes {
yes => {
if !option::is_none::<~str>(i_arg) {
vec::push(vals[optid],
val(option::get::<~str>(i_arg)));
@ -313,9 +310,12 @@ fn getopts(args: ~[~str], opts: ~[opt]) -> result unsafe {
fn opt_vals(m: matches, nm: ~str) -> ~[optval] {
return alt find_opt(m.opts, mkname(nm)) {
some(id) { m.vals[id] }
none { error!{"No option '%s' defined", nm}; fail }
};
some(id) => m.vals[id],
none => {
error!{"No option '%s' defined", nm};
fail
}
};
}
fn opt_val(m: matches, nm: ~str) -> optval { return opt_vals(m, nm)[0]; }
@ -329,8 +329,8 @@ fn opt_present(m: matches, nm: ~str) -> bool {
fn opts_present(m: matches, names: ~[~str]) -> bool {
for vec::each(names) |nm| {
alt find_opt(m.opts, mkname(nm)) {
some(_) { return true; }
_ { }
some(_) => return true,
_ => ()
}
}
return false;
@ -344,7 +344,7 @@ fn opts_present(m: matches, names: ~[~str]) -> bool {
* argument
*/
fn opt_str(m: matches, nm: ~str) -> ~str {
return alt opt_val(m, nm) { val(s) { s } _ { fail } };
return alt opt_val(m, nm) { val(s) => s, _ => fail };
}
/**
@ -356,8 +356,8 @@ fn opt_str(m: matches, nm: ~str) -> ~str {
fn opts_str(m: matches, names: ~[~str]) -> ~str {
for vec::each(names) |nm| {
alt opt_val(m, nm) {
val(s) { return s }
_ { }
val(s) => return s,
_ => ()
}
}
fail;
@ -373,7 +373,7 @@ fn opts_str(m: matches, names: ~[~str]) -> ~str {
fn opt_strs(m: matches, nm: ~str) -> ~[~str] {
let mut acc: ~[~str] = ~[];
for vec::each(opt_vals(m, nm)) |v| {
alt v { val(s) { vec::push(acc, s); } _ { } }
alt v { val(s) => vec::push(acc, s), _ => () }
}
return acc;
}
@ -382,7 +382,7 @@ fn opt_strs(m: matches, nm: ~str) -> ~[~str] {
fn opt_maybe_str(m: matches, nm: ~str) -> option<~str> {
let vals = opt_vals(m, nm);
if vec::len::<optval>(vals) == 0u { return none::<~str>; }
return alt vals[0] { val(s) { some::<~str>(s) } _ { none::<~str> } };
return alt vals[0] { val(s) => some::<~str>(s), _ => none::<~str> };
}
@ -396,7 +396,7 @@ fn opt_maybe_str(m: matches, nm: ~str) -> option<~str> {
fn opt_default(m: matches, nm: ~str, def: ~str) -> option<~str> {
let vals = opt_vals(m, nm);
if vec::len::<optval>(vals) == 0u { return none::<~str>; }
return alt vals[0] { val(s) { some::<~str>(s) } _ { some::<~str>(def) } }
return alt vals[0] { val(s) => some::<~str>(s), _ => some::<~str>(def) }
}
#[cfg(test)]
@ -414,11 +414,11 @@ mod tests {
fn check_fail_type(f: fail_, ft: fail_type) {
alt f {
argument_missing(_) { assert (ft == argument_missing_); }
unrecognized_option(_) { assert (ft == unrecognized_option_); }
option_missing(_) { assert (ft == option_missing_); }
option_duplicated(_) { assert (ft == option_duplicated_); }
unexpected_argument(_) { assert (ft == unexpected_argument_); }
argument_missing(_) => assert ft == argument_missing_,
unrecognized_option(_) => assert ft == unrecognized_option_,
option_missing(_) => assert ft == option_missing_,
option_duplicated(_) => assert ft == option_duplicated_,
unexpected_argument(_) => assert ft == unexpected_argument_
}
}
@ -430,7 +430,7 @@ mod tests {
let opts = ~[reqopt(~"test")];
let rs = getopts(args, opts);
alt check rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"test"));
assert (opt_str(m, ~"test") == ~"20");
}
@ -443,8 +443,8 @@ mod tests {
let opts = ~[reqopt(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_missing_); }
_ { fail; }
err(f) => check_fail_type(f, option_missing_),
_ => fail
}
}
@ -454,8 +454,8 @@ mod tests {
let opts = ~[reqopt(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
_ { fail; }
err(f) => check_fail_type(f, argument_missing_),
_ => fail
}
}
@ -465,8 +465,8 @@ mod tests {
let opts = ~[reqopt(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
_ { fail; }
err(f) => check_fail_type(f, option_duplicated_),
_ => fail
}
}
@ -476,11 +476,11 @@ mod tests {
let opts = ~[reqopt(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"t"));
assert (opt_str(m, ~"t") == ~"20");
}
_ { fail; }
_ => fail
}
}
@ -490,8 +490,8 @@ mod tests {
let opts = ~[reqopt(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_missing_); }
_ { fail; }
err(f) => check_fail_type(f, option_missing_),
_ => fail
}
}
@ -501,8 +501,8 @@ mod tests {
let opts = ~[reqopt(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
_ { fail; }
err(f) => check_fail_type(f, argument_missing_),
_ => fail
}
}
@ -512,8 +512,8 @@ mod tests {
let opts = ~[reqopt(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
_ { fail; }
err(f) => check_fail_type(f, option_duplicated_),
_ => fail
}
}
@ -525,11 +525,11 @@ mod tests {
let opts = ~[optopt(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"test"));
assert (opt_str(m, ~"test") == ~"20");
}
_ { fail; }
_ => fail
}
}
@ -539,8 +539,8 @@ mod tests {
let opts = ~[optopt(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, ~"test")); }
_ { fail; }
ok(m) => assert (!opt_present(m, ~"test")),
_ => fail
}
}
@ -550,8 +550,8 @@ mod tests {
let opts = ~[optopt(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
_ { fail; }
err(f) => check_fail_type(f, argument_missing_),
_ => fail
}
}
@ -561,8 +561,8 @@ mod tests {
let opts = ~[optopt(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
_ { fail; }
err(f) => check_fail_type(f, option_duplicated_),
_ => fail
}
}
@ -572,11 +572,11 @@ mod tests {
let opts = ~[optopt(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"t"));
assert (opt_str(m, ~"t") == ~"20");
}
_ { fail; }
_ => fail
}
}
@ -586,8 +586,8 @@ mod tests {
let opts = ~[optopt(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, ~"t")); }
_ { fail; }
ok(m) => assert (!opt_present(m, ~"t")),
_ => fail
}
}
@ -597,8 +597,8 @@ mod tests {
let opts = ~[optopt(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
_ { fail; }
err(f) => check_fail_type(f, argument_missing_),
_ => fail
}
}
@ -608,8 +608,8 @@ mod tests {
let opts = ~[optopt(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
_ { fail; }
err(f) => check_fail_type(f, option_duplicated_),
_ => fail
}
}
@ -621,8 +621,8 @@ mod tests {
let opts = ~[optflag(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (opt_present(m, ~"test")); }
_ { fail; }
ok(m) => assert (opt_present(m, ~"test")),
_ => fail
}
}
@ -632,8 +632,8 @@ mod tests {
let opts = ~[optflag(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, ~"test")); }
_ { fail; }
ok(m) => assert (!opt_present(m, ~"test")),
_ => fail
}
}
@ -643,11 +643,11 @@ mod tests {
let opts = ~[optflag(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) {
err(f) => {
log(error, fail_str(f));
check_fail_type(f, unexpected_argument_);
}
_ { fail; }
_ => fail
}
}
@ -657,8 +657,8 @@ mod tests {
let opts = ~[optflag(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
_ { fail; }
err(f) => check_fail_type(f, option_duplicated_),
_ => fail
}
}
@ -668,8 +668,8 @@ mod tests {
let opts = ~[optflag(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (opt_present(m, ~"t")); }
_ { fail; }
ok(m) => assert (opt_present(m, ~"t")),
_ => fail
}
}
@ -679,8 +679,8 @@ mod tests {
let opts = ~[optflag(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, ~"t")); }
_ { fail; }
ok(m) => assert (!opt_present(m, ~"t")),
_ => fail
}
}
@ -690,12 +690,12 @@ mod tests {
let opts = ~[optflag(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
// The next variable after the flag is just a free argument
assert (m.free[0] == ~"20");
}
_ { fail; }
_ => fail
}
}
@ -705,8 +705,8 @@ mod tests {
let opts = ~[optflag(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
_ { fail; }
err(f) => check_fail_type(f, option_duplicated_),
_ => fail
}
}
@ -718,11 +718,11 @@ mod tests {
let opts = ~[optmulti(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"test"));
assert (opt_str(m, ~"test") == ~"20");
}
_ { fail; }
_ => fail
}
}
@ -732,8 +732,8 @@ mod tests {
let opts = ~[optmulti(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, ~"test")); }
_ { fail; }
ok(m) => assert (!opt_present(m, ~"test")),
_ => fail
}
}
@ -743,8 +743,8 @@ mod tests {
let opts = ~[optmulti(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
_ { fail; }
err(f) => check_fail_type(f, argument_missing_),
_ => fail
}
}
@ -754,13 +754,13 @@ mod tests {
let opts = ~[optmulti(~"test")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"test"));
assert (opt_str(m, ~"test") == ~"20");
assert (opt_strs(m, ~"test")[0] == ~"20");
assert (opt_strs(m, ~"test")[1] == ~"30");
}
_ { fail; }
_ => fail
}
}
@ -770,11 +770,11 @@ mod tests {
let opts = ~[optmulti(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"t"));
assert (opt_str(m, ~"t") == ~"20");
}
_ { fail; }
_ => fail
}
}
@ -784,8 +784,8 @@ mod tests {
let opts = ~[optmulti(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, ~"t")); }
_ { fail; }
ok(m) => assert (!opt_present(m, ~"t")),
_ => fail
}
}
@ -795,8 +795,8 @@ mod tests {
let opts = ~[optmulti(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
_ { fail; }
err(f) => check_fail_type(f, argument_missing_),
_ => fail
}
}
@ -806,13 +806,13 @@ mod tests {
let opts = ~[optmulti(~"t")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (opt_present(m, ~"t"));
assert (opt_str(m, ~"t") == ~"20");
assert (opt_strs(m, ~"t")[0] == ~"20");
assert (opt_strs(m, ~"t")[1] == ~"30");
}
_ { fail; }
_ => fail
}
}
@ -822,8 +822,8 @@ mod tests {
let opts = ~[optmulti(~"t")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, unrecognized_option_); }
_ { fail; }
err(f) => check_fail_type(f, unrecognized_option_),
_ => fail
}
}
@ -833,8 +833,8 @@ mod tests {
let opts = ~[optmulti(~"test")];
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, unrecognized_option_); }
_ { fail; }
err(f) => check_fail_type(f, unrecognized_option_),
_ => fail
}
}
@ -850,7 +850,7 @@ mod tests {
optopt(~"notpresent")];
let rs = getopts(args, opts);
alt rs {
ok(m) {
ok(m) => {
assert (m.free[0] == ~"prog");
assert (m.free[1] == ~"free1");
assert (opt_str(m, ~"s") == ~"20");
@ -864,7 +864,7 @@ mod tests {
assert (opt_strs(m, ~"n")[1] == ~"-60 70");
assert (!opt_present(m, ~"notpresent"));
}
_ { fail; }
_ => fail
}
}
@ -873,8 +873,8 @@ mod tests {
let args = ~[~"-e", ~"foo", ~"--encrypt", ~"foo"];
let opts = ~[optopt(~"e"), optopt(~"encrypt")];
let matches = alt getopts(args, opts) {
result::ok(m) { m }
result::err(f) { fail; }
result::ok(m) => m,
result::err(f) => fail
};
assert opts_present(matches, ~[~"e"]);
assert opts_present(matches, ~[~"encrypt"]);
@ -894,8 +894,8 @@ mod tests {
let args = ~[~"-Lfoo"];
let opts = ~[optmulti(~"L")];
let matches = alt getopts(args, opts) {
result::ok(m) { m }
result::err(f) { fail; }
result::ok(m) => m,
result::err(f) => fail
};
assert opts_present(matches, ~[~"L"]);
assert opts_str(matches, ~[~"L"]) == ~"foo";

View file

@ -46,14 +46,10 @@ type error = {
/// Serializes a json value into a io::writer
fn to_writer(wr: io::writer, j: json) {
alt j {
num(n) { wr.write_str(float::to_str(n, 6u)); }
string(s) {
wr.write_str(escape_str(*s));
}
boolean(b) {
wr.write_str(if b { ~"true" } else { ~"false" });
}
list(v) {
num(n) => wr.write_str(float::to_str(n, 6u)),
string(s) => wr.write_str(escape_str(*s)),
boolean(b) => wr.write_str(if b { ~"true" } else { ~"false" }),
list(v) => {
wr.write_char('[');
let mut first = true;
for (*v).each |item| {
@ -65,7 +61,7 @@ fn to_writer(wr: io::writer, j: json) {
};
wr.write_char(']');
}
dict(d) {
dict(d) => {
if d.size() == 0u {
wr.write_str(~"{}");
return;
@ -84,9 +80,7 @@ fn to_writer(wr: io::writer, j: json) {
};
wr.write_str(~" }");
}
null {
wr.write_str(~"null");
}
null => wr.write_str(~"null")
}
}
@ -94,14 +88,14 @@ fn escape_str(s: ~str) -> ~str {
let mut escaped = ~"\"";
do str::chars_iter(s) |c| {
alt c {
'"' { escaped += ~"\\\""; }
'\\' { escaped += ~"\\\\"; }
'\x08' { escaped += ~"\\b"; }
'\x0c' { escaped += ~"\\f"; }
'\n' { escaped += ~"\\n"; }
'\r' { escaped += ~"\\r"; }
'\t' { escaped += ~"\\t"; }
_ { escaped += str::from_char(c); }
'"' => escaped += ~"\\\"",
'\\' => escaped += ~"\\\\",
'\x08' => escaped += ~"\\b",
'\x0c' => escaped += ~"\\f",
'\n' => escaped += ~"\\n",
'\r' => escaped += ~"\\r",
'\t' => escaped += ~"\\t",
_ => escaped += str::from_char(c)
}
};
@ -151,7 +145,7 @@ impl parser for parser {
fn parse() -> result<json, error> {
alt self.parse_value() {
ok(value) {
ok(value) => {
// Skip trailing whitespaces.
self.parse_whitespace();
// Make sure there is no trailing characters.
@ -161,7 +155,7 @@ impl parser for parser {
self.error(~"trailing characters")
}
}
e { e }
e => e
}
}
@ -171,19 +165,17 @@ impl parser for parser {
if self.eof() { return self.error(~"EOF while parsing value"); }
alt self.ch {
'n' { self.parse_ident(~"ull", null) }
't' { self.parse_ident(~"rue", boolean(true)) }
'f' { self.parse_ident(~"alse", boolean(false)) }
'0' to '9' | '-' { self.parse_number() }
'"' {
alt self.parse_str() {
ok(s) { ok(string(s)) }
err(e) { err(e) }
}
'n' => self.parse_ident(~"ull", null),
't' => self.parse_ident(~"rue", boolean(true)),
'f' => self.parse_ident(~"alse", boolean(false)),
'0' to '9' | '-' => self.parse_number(),
'"' => alt self.parse_str() {
ok(s) => ok(string(s)),
err(e) => err(e)
}
'[' { self.parse_list() }
'{' { self.parse_object() }
_ { self.error(~"invalid syntax") }
'[' => self.parse_list(),
'{' => self.parse_object(),
_ => self.error(~"invalid syntax")
}
}
@ -209,21 +201,21 @@ impl parser for parser {
}
let mut res = alt self.parse_integer() {
ok(res) { res }
err(e) { return err(e); }
ok(res) => res,
err(e) => return err(e)
};
if self.ch == '.' {
alt self.parse_decimal(res) {
ok(r) { res = r; }
err(e) { return err(e); }
ok(r) => res = r,
err(e) => return err(e)
}
}
if self.ch == 'e' || self.ch == 'E' {
alt self.parse_exponent(res) {
ok(r) { res = r; }
err(e) { return err(e); }
ok(r) => res = r,
err(e) => return err(e)
}
}
@ -234,29 +226,29 @@ impl parser for parser {
let mut res = 0f;
alt self.ch {
'0' {
'0' => {
self.bump();
// There can be only one leading '0'.
alt self.ch {
'0' to '9' { return self.error(~"invalid number"); }
_ {}
'0' to '9' => return self.error(~"invalid number"),
_ => ()
}
}
'1' to '9' {
'1' to '9' => {
while !self.eof() {
alt self.ch {
'0' to '9' {
'0' to '9' => {
res *= 10f;
res += ((self.ch as int) - ('0' as int)) as float;
self.bump();
}
_ { break; }
_ => break
}
}
}
_ { return self.error(~"invalid number"); }
_ => return self.error(~"invalid number")
}
ok(res)
@ -267,21 +259,21 @@ impl parser for parser {
// Make sure a digit follows the decimal place.
alt self.ch {
'0' to '9' {}
_ { return self.error(~"invalid number"); }
'0' to '9' => (),
_ => return self.error(~"invalid number")
}
let mut res = res;
let mut dec = 1f;
while !self.eof() {
alt self.ch {
'0' to '9' {
'0' to '9' => {
dec /= 10f;
res += (((self.ch as int) - ('0' as int)) as float) * dec;
self.bump();
}
_ { break; }
_ => break
}
}
@ -296,26 +288,26 @@ impl parser for parser {
let mut neg_exp = false;
alt self.ch {
'+' { self.bump(); }
'-' { self.bump(); neg_exp = true; }
_ {}
'+' => self.bump(),
'-' => { self.bump(); neg_exp = true; }
_ => ()
}
// Make sure a digit follows the exponent place.
alt self.ch {
'0' to '9' {}
_ { return self.error(~"invalid number"); }
'0' to '9' => (),
_ => return self.error(~"invalid number")
}
while !self.eof() {
alt self.ch {
'0' to '9' {
'0' to '9' => {
exp *= 10u;
exp += (self.ch as uint) - ('0' as uint);
self.bump();
}
_ { break; }
_ => break
}
}
@ -338,25 +330,25 @@ impl parser for parser {
if (escape) {
alt self.ch {
'"' { str::push_char(res, '"'); }
'\\' { str::push_char(res, '\\'); }
'/' { str::push_char(res, '/'); }
'b' { str::push_char(res, '\x08'); }
'f' { str::push_char(res, '\x0c'); }
'n' { str::push_char(res, '\n'); }
'r' { str::push_char(res, '\r'); }
't' { str::push_char(res, '\t'); }
'u' {
'"' => str::push_char(res, '"'),
'\\' => str::push_char(res, '\\'),
'/' => str::push_char(res, '/'),
'b' => str::push_char(res, '\x08'),
'f' => str::push_char(res, '\x0c'),
'n' => str::push_char(res, '\n'),
'r' => str::push_char(res, '\r'),
't' => str::push_char(res, '\t'),
'u' => {
// Parse \u1234.
let mut i = 0u;
let mut n = 0u;
while i < 4u {
alt self.next_char() {
'0' to '9' {
'0' to '9' => {
n = n * 10u +
(self.ch as uint) - ('0' as uint);
}
_ { return self.error(~"invalid \\u escape"); }
_ => return self.error(~"invalid \\u escape")
}
i += 1u;
}
@ -368,7 +360,7 @@ impl parser for parser {
str::push_char(res, n as char);
}
_ { return self.error(~"invalid escape"); }
_ => return self.error(~"invalid escape")
}
escape = false;
} else if self.ch == '\\' {
@ -398,8 +390,8 @@ impl parser for parser {
loop {
alt self.parse_value() {
ok(v) { vec::push(values, v); }
e { return e; }
ok(v) => vec::push(values, v),
e => return e
}
self.parse_whitespace();
@ -408,9 +400,9 @@ impl parser for parser {
}
alt self.ch {
',' { self.bump(); }
']' { self.bump(); return ok(list(@values)); }
_ { return self.error(~"expected `,` or `]`"); }
',' => self.bump(),
']' => { self.bump(); return ok(list(@values)); }
_ => return self.error(~"expected `,` or `]`")
}
};
}
@ -434,8 +426,8 @@ impl parser for parser {
}
let key = alt self.parse_str() {
ok(key) { key }
err(e) { return err(e); }
ok(key) => key,
err(e) => return err(e)
};
self.parse_whitespace();
@ -447,15 +439,15 @@ impl parser for parser {
self.bump();
alt self.parse_value() {
ok(value) { values.insert(copy *key, value); }
e { return e; }
ok(value) => { values.insert(copy *key, value); }
e => return e
}
self.parse_whitespace();
alt self.ch {
',' { self.bump(); }
'}' { self.bump(); return ok(dict(values)); }
_ {
',' => self.bump(),
'}' => { self.bump(); return ok(dict(values)); }
_ => {
if self.eof() { break; }
return self.error(~"expected `,` or `}`");
}
@ -486,18 +478,17 @@ fn from_str(s: ~str) -> result<json, error> {
/// Test if two json values are equal
fn eq(value0: json, value1: json) -> bool {
alt (value0, value1) {
(num(f0), num(f1)) { f0 == f1 }
(string(s0), string(s1)) { s0 == s1 }
(boolean(b0), boolean(b1)) { b0 == b1 }
(list(l0), list(l1)) { vec::all2(*l0, *l1, eq) }
(dict(d0), dict(d1)) {
(num(f0), num(f1)) => f0 == f1,
(string(s0), string(s1)) => s0 == s1,
(boolean(b0), boolean(b1)) => b0 == b1,
(list(l0), list(l1)) => vec::all2(*l0, *l1, eq),
(dict(d0), dict(d1)) => {
if d0.size() == d1.size() {
let mut equal = true;
for d0.each |k, v0| {
alt d1.find(k) {
some(v1) {
if !eq(v0, v1) { equal = false; } }
none { equal = false; }
some(v1) => if !eq(v0, v1) { equal = false },
none => equal = false
}
};
equal
@ -505,8 +496,8 @@ fn eq(value0: json, value1: json) -> bool {
false
}
}
(null, null) { true }
_ { false }
(null, null) => true,
_ => false
}
}
@ -626,8 +617,8 @@ impl <A: to_json copy> of to_json for hashmap<~str, A> {
impl <A: to_json> of to_json for option<A> {
fn to_json() -> json {
alt self {
none { null }
some(value) { value.to_json() }
none => null,
some(value) => value.to_json()
}
}
}

View file

@ -44,11 +44,11 @@ fn find<T: copy>(ls: @list<T>, f: fn(T) -> bool) -> option<T> {
let mut ls = ls;
loop {
ls = alt *ls {
cons(hd, tl) {
cons(hd, tl) => {
if f(hd) { return some(hd); }
tl
}
nil { return none; }
nil => return none
}
};
}
@ -64,8 +64,8 @@ fn has<T: copy>(ls: @list<T>, elt: T) -> bool {
/// Returns true if the list is empty
pure fn is_empty<T: copy>(ls: @list<T>) -> bool {
alt *ls {
nil { true }
_ { false }
nil => true,
_ => false
}
}
@ -84,21 +84,24 @@ fn len<T>(ls: @list<T>) -> uint {
/// Returns all but the first element of a list
pure fn tail<T: copy>(ls: @list<T>) -> @list<T> {
alt *ls {
cons(_, tl) { return tl; }
nil { fail ~"list empty" }
cons(_, tl) => return tl,
nil => fail ~"list empty"
}
}
/// Returns the first element of a list
pure fn head<T: copy>(ls: @list<T>) -> T {
alt check *ls { cons(hd, _) { hd } }
alt check *ls { cons(hd, _) => hd }
}
/// Appends one list to another
pure fn append<T: copy>(l: @list<T>, m: @list<T>) -> @list<T> {
alt *l {
nil { return m; }
cons(x, xs) { let rest = append(xs, m); return @cons(x, rest); }
nil => return m,
cons(x, xs) => {
let rest = append(xs, m);
return @cons(x, rest);
}
}
}
@ -112,11 +115,11 @@ fn iter<T>(l: @list<T>, f: fn(T)) {
let mut cur = l;
loop {
cur = alt *cur {
cons(hd, tl) {
cons(hd, tl) => {
f(hd);
tl
}
nil { break; }
nil => break
}
}
}
@ -126,11 +129,11 @@ fn each<T>(l: @list<T>, f: fn(T) -> bool) {
let mut cur = l;
loop {
cur = alt *cur {
cons(hd, tl) {
cons(hd, tl) => {
if !f(hd) { return; }
tl
}
nil { break; }
nil => break
}
}
}

View file

@ -134,12 +134,12 @@ mod chained {
let mut comp = 1u; // for logging
loop {
alt copy e0.next {
none {
none => {
debug!{"search_tbl: absent, comp %u, hash %u, idx %u",
comp, h, idx};
return not_found;
}
some(e1) {
some(e1) => {
comp += 1u;
if e1.hash == h && self.eqer(&e1.key, k) {
debug!{"search_tbl: present, comp %u, \
@ -157,12 +157,12 @@ mod chained {
fn search_tbl(k: &K, h: uint) -> search_result<K,V> {
let idx = h % vec::len(self.chains);
alt copy self.chains[idx] {
none {
none => {
debug!{"search_tbl: none, comp %u, hash %u, idx %u",
0u, h, idx};
return not_found;
}
some(e) {
some(e) => {
if e.hash == h && self.eqer(&e.key, k) {
debug!{"search_tbl: present, comp %u, hash %u, idx %u",
1u, h, idx};
@ -194,8 +194,8 @@ mod chained {
let mut chain = self.chains[i];
loop {
chain = alt chain {
none { break; }
some(entry) {
none => break,
some(entry) => {
let next = entry.next;
if !blk(entry) { return; }
next
@ -217,15 +217,15 @@ mod chained {
fn contains_key_ref(k: &K) -> bool {
let hash = self.hasher(k);
alt self.search_tbl(k, hash) {
not_found {false}
found_first(*) | found_after(*) {true}
not_found => false,
found_first(*) | found_after(*) => true
}
}
fn insert(+k: K, +v: V) -> bool {
let hash = self.hasher(&k);
alt self.search_tbl(&k, hash) {
not_found {
not_found => {
self.count += 1u;
let idx = hash % vec::len(self.chains);
let old_chain = self.chains[idx];
@ -245,7 +245,7 @@ mod chained {
return true;
}
found_first(idx, entry) {
found_first(idx, entry) => {
self.chains[idx] = some(@entry {
hash: hash,
key: k,
@ -253,7 +253,7 @@ mod chained {
next: entry.next});
return false;
}
found_after(prev, entry) {
found_after(prev, entry) => {
prev.next = some(@entry {
hash: hash,
key: k,
@ -266,9 +266,9 @@ mod chained {
fn find(+k: K) -> option<V> {
alt self.search_tbl(&k, self.hasher(&k)) {
not_found {none}
found_first(_, entry) {some(entry.value)}
found_after(_, entry) {some(entry.value)}
not_found => none,
found_first(_, entry) => some(entry.value),
found_after(_, entry) => some(entry.value)
}
}
@ -282,13 +282,13 @@ mod chained {
fn remove(+k: K) -> option<V> {
alt self.search_tbl(&k, self.hasher(&k)) {
not_found {none}
found_first(idx, entry) {
not_found => none,
found_first(idx, entry) => {
self.count -= 1u;
self.chains[idx] = entry.next;
some(entry.value)
}
found_after(eprev, entry) {
found_after(eprev, entry) => {
self.count -= 1u;
eprev.next = entry.next;
some(entry.value)
@ -639,8 +639,8 @@ mod tests {
while i < num_to_insert {
let v = hm.remove(i);
alt v {
option::some(u) { assert (u == i * i); }
option::none { fail; }
option::some(u) => assert (u == i * i),
option::none => fail
}
i += 2u;
}

View file

@ -48,23 +48,19 @@ type parse_addr_err = {
*/
fn format_addr(ip: ip_addr) -> ~str {
alt ip {
ipv4(addr) {
unsafe {
let result = uv_ip4_name(&addr);
if result == ~"" {
fail ~"failed to convert inner sockaddr_in address to str"
}
result
ipv4(addr) => unsafe {
let result = uv_ip4_name(&addr);
if result == ~"" {
fail ~"failed to convert inner sockaddr_in address to str"
}
result
}
ipv6(addr) {
unsafe {
let result = uv_ip6_name(&addr);
if result == ~"" {
fail ~"failed to convert inner sockaddr_in address to str"
}
result
ipv6(addr) => unsafe {
let result = uv_ip6_name(&addr);
if result == ~"" {
fail ~"failed to convert inner sockaddr_in address to str"
}
result
}
}
}
@ -108,10 +104,10 @@ fn get_addr(++node: ~str, iotask: iotask)
ptr::null(),
ptr::null());
alt result {
0i32 {
0i32 => {
set_data_for_req(handle_ptr, handle_data_ptr);
}
_ {
_ => {
output_ch.send(result::err(get_addr_unknown_error));
}
}
@ -139,10 +135,8 @@ mod v4 {
*/
fn parse_addr(ip: ~str) -> ip_addr {
alt try_parse_addr(ip) {
result::ok(addr) { copy(addr) }
result::err(err_data) {
fail err_data.err_msg
}
result::ok(addr) => copy(addr),
result::err(err_data) => fail err_data.err_msg
}
}
// the simple, old style numberic representation of
@ -162,8 +156,8 @@ mod v4 {
fn parse_to_ipv4_rep(ip: ~str) -> result::result<ipv4_rep, ~str> {
let parts = vec::map(str::split_char(ip, '.'), |s| {
alt uint::from_str(s) {
some(n) if n <= 255u { n }
_ { 256u }
some(n) if n <= 255u => n,
_ => 256u
}
});
if vec::len(parts) != 4u {
@ -227,10 +221,8 @@ mod v6 {
*/
fn parse_addr(ip: ~str) -> ip_addr {
alt try_parse_addr(ip) {
result::ok(addr) { copy(addr) }
result::err(err_data) {
fail err_data.err_msg
}
result::ok(addr) => copy(addr),
result::err(err_data) => fail err_data.err_msg
}
}
fn try_parse_addr(ip: ~str) -> result::result<ip_addr,parse_addr_err> {
@ -335,11 +327,11 @@ mod test {
#[test]
fn test_ip_ipv4_bad_parse() {
alt v4::try_parse_addr(~"b4df00d") {
result::err(err_info) {
result::err(err_info) => {
log(debug, fmt!{"got error as expected %?", err_info});
assert true;
}
result::ok(addr) {
result::ok(addr) => {
fail fmt!{"Expected failure, but got addr %?", addr};
}
}
@ -348,11 +340,11 @@ mod test {
#[ignore(target_os="win32")]
fn test_ip_ipv6_bad_parse() {
alt v6::try_parse_addr(~"::,~2234k;") {
result::err(err_info) {
result::err(err_info) => {
log(debug, fmt!{"got error as expected %?", err_info});
assert true;
}
result::ok(addr) {
result::ok(addr) => {
fail fmt!{"Expected failure, but got addr %?", addr};
}
}
@ -373,12 +365,8 @@ mod test {
localhost_name, vec::len(results)});
for vec::each(results) |r| {
let ipv_prefix = alt r {
ipv4(_) {
~"IPv4"
}
ipv6(_) {
~"IPv6"
}
ipv4(_) => ~"IPv4",
ipv6(_) => ~"IPv6"
};
log(debug, fmt!{"test_get_addr: result %s: '%s'",
ipv_prefix, format_addr(r)});

View file

@ -152,16 +152,16 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
log(debug, fmt!{"stream_handle_ptr in interact %?",
stream_handle_ptr});
alt uv::ll::tcp_init( loop_ptr, stream_handle_ptr) {
0i32 {
0i32 => {
log(debug, ~"tcp_init successful");
alt input_ip {
ipv4 {
ipv4 => {
log(debug, ~"dealing w/ ipv4 connection..");
let connect_req_ptr =
ptr::addr_of((*socket_data_ptr).connect_req);
let addr_str = ip::format_addr(input_ip);
let connect_result = alt input_ip {
ip::ipv4(addr) {
ip::ipv4(addr) => {
// have to "recreate" the sockaddr_in/6
// since the ip_addr discards the port
// info.. should probably add an additional
@ -175,7 +175,7 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
ptr::addr_of(in_addr),
tcp_connect_on_connect_cb)
}
ip::ipv6(addr) {
ip::ipv6(addr) => {
log(debug, fmt!{"addr: %?", addr});
let in_addr = uv::ll::ip6_addr(addr_str, port as int);
uv::ll::tcp_connect6(
@ -186,7 +186,7 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
}
};
alt connect_result {
0i32 {
0i32 => {
log(debug, ~"tcp_connect successful");
// reusable data that we'll have for the
// duration..
@ -201,7 +201,7 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
// let tcp_connect_on_connect_cb send on
// the result_ch, now..
}
_ {
_ => {
// immediate connect failure.. probably a garbage
// ip or somesuch
let err_data = uv::ll::get_last_err_data(loop_ptr);
@ -215,7 +215,7 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
}
}
}
_ {
_ => {
// failure to create a tcp handle
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send((*conn_data_ptr).result_ch,
@ -224,19 +224,19 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
}
};
alt comm::recv(result_po) {
conn_success {
conn_success => {
log(debug, ~"tcp::connect - received success on result_po");
result::ok(tcp_socket(socket_data))
}
conn_failure(err_data) {
conn_failure(err_data) => {
comm::recv(closed_signal_po);
log(debug, ~"tcp::connect - received failure on result_po");
// still have to free the malloc'd stream handle..
rustrt::rust_uv_current_kernel_free(stream_handle_ptr
as *libc::c_void);
let tcp_conn_err = alt err_data.err_name {
~"ECONNREFUSED" { connection_refused }
_ { generic_connect_err(err_data.err_name, err_data.err_msg) }
~"ECONNREFUSED" => connection_refused,
_ => generic_connect_err(err_data.err_name, err_data.err_msg)
};
result::err(tcp_conn_err)
}
@ -470,7 +470,7 @@ fn accept(new_conn: tcp_new_connection)
-> result::result<tcp_socket, tcp_err_data> unsafe {
alt new_conn{
new_tcp_conn(server_handle_ptr) {
new_tcp_conn(server_handle_ptr) => {
let server_data_ptr = uv::ll::get_data_for_uv_handle(
server_handle_ptr) as *tcp_listen_fc_data;
let reader_po = comm::port::<result::result<~[u8], tcp_err_data>>();
@ -502,26 +502,26 @@ fn accept(new_conn: tcp_new_connection)
let loop_ptr = uv::ll::get_loop_for_uv_handle(
server_handle_ptr);
alt uv::ll::tcp_init(loop_ptr, client_stream_handle_ptr) {
0i32 {
0i32 => {
log(debug, ~"uv_tcp_init successful for client stream");
alt uv::ll::accept(
server_handle_ptr as *libc::c_void,
client_stream_handle_ptr as *libc::c_void) {
0i32 {
0i32 => {
log(debug, ~"successfully accepted client connection");
uv::ll::set_data_for_uv_handle(client_stream_handle_ptr,
client_socket_data_ptr
as *libc::c_void);
comm::send(result_ch, none);
}
_ {
_ => {
log(debug, ~"failed to accept client conn");
comm::send(result_ch, some(
uv::ll::get_last_err_data(loop_ptr).to_tcp_err()));
}
}
}
_ {
_ => {
log(debug, ~"failed to init client stream");
comm::send(result_ch, some(
uv::ll::get_last_err_data(loop_ptr).to_tcp_err()));
@ -529,12 +529,8 @@ fn accept(new_conn: tcp_new_connection)
}
// UNSAFE LIBUV INTERACTION END
alt comm::recv(result_po) {
some(err_data) {
result::err(err_data)
}
none {
result::ok(tcp_socket(client_socket_data))
}
some(err_data) => result::err(err_data),
none => result::ok(tcp_socket(client_socket_data))
}
}
}
@ -615,19 +611,19 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
let loc_ip = copy(host_ip);
do iotask::interact(iotask) |loop_ptr| {
alt uv::ll::tcp_init(loop_ptr, server_stream_ptr) {
0i32 {
0i32 => {
uv::ll::set_data_for_uv_handle(
server_stream_ptr,
server_data_ptr);
let addr_str = ip::format_addr(loc_ip);
let bind_result = alt loc_ip {
ip::ipv4(addr) {
ip::ipv4(addr) => {
log(debug, fmt!{"addr: %?", addr});
let in_addr = uv::ll::ip4_addr(addr_str, port as int);
uv::ll::tcp_bind(server_stream_ptr,
ptr::addr_of(in_addr))
}
ip::ipv6(addr) {
ip::ipv6(addr) => {
log(debug, fmt!{"addr: %?", addr});
let in_addr = uv::ll::ip6_addr(addr_str, port as int);
uv::ll::tcp_bind6(server_stream_ptr,
@ -635,28 +631,26 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
}
};
alt bind_result {
0i32 {
0i32 => {
alt uv::ll::listen(server_stream_ptr,
backlog as libc::c_int,
tcp_lfc_on_connection_cb) {
0i32 {
comm::send(setup_ch, none);
}
_ {
0i32 => comm::send(setup_ch, none),
_ => {
log(debug, ~"failure to uv_listen()");
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send(setup_ch, some(err_data));
}
}
}
_ {
_ => {
log(debug, ~"failure to uv_tcp_bind");
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send(setup_ch, some(err_data));
}
}
}
_ {
_ => {
log(debug, ~"failure to uv_tcp_init");
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send(setup_ch, some(err_data));
@ -666,7 +660,7 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
setup_ch.recv()
};
alt setup_result {
some(err_data) {
some(err_data) => {
do iotask::interact(iotask) |loop_ptr| {
log(debug, fmt!{"tcp::listen post-kill recv hl interact %?",
loop_ptr});
@ -675,15 +669,15 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
};
stream_closed_po.recv();
alt err_data.err_name {
~"EACCES" {
~"EACCES" => {
log(debug, ~"Got EACCES error");
result::err(access_denied)
}
~"EADDRINUSE" {
~"EADDRINUSE" => {
log(debug, ~"Got EADDRINUSE error");
result::err(address_in_use)
}
_ {
_ => {
log(debug, fmt!{"Got '%s' '%s' libuv error",
err_data.err_name, err_data.err_msg});
result::err(
@ -691,7 +685,7 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
}
}
}
none {
none => {
on_establish_cb(kill_ch);
let kill_result = comm::recv(kill_po);
do iotask::interact(iotask) |loop_ptr| {
@ -703,14 +697,10 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
stream_closed_po.recv();
alt kill_result {
// some failure post bind/listen
some(err_data) {
result::err(generic_listen_err(err_data.err_name,
err_data.err_msg))
}
some(err_data) => result::err(generic_listen_err(err_data.err_name,
err_data.err_msg)),
// clean exit
none {
result::ok(())
}
none => result::ok(())
}
}
}
@ -889,7 +879,7 @@ fn read_common_impl(socket_data: *tcp_socket_data, timeout_msecs: uint)
};
log(debug, ~"tcp::read after recv_timeout");
alt read_result {
none {
none => {
log(debug, ~"tcp::read: timed out..");
let err_data = {
err_name: ~"TIMEOUT",
@ -898,7 +888,7 @@ fn read_common_impl(socket_data: *tcp_socket_data, timeout_msecs: uint)
read_stop_common_impl(socket_data);
result::err(err_data)
}
some(data_result) {
some(data_result) => {
log(debug, ~"tcp::read got data");
read_stop_common_impl(socket_data);
data_result
@ -916,11 +906,11 @@ fn read_stop_common_impl(socket_data: *tcp_socket_data) ->
do iotask::interact((*socket_data).iotask) |loop_ptr| {
log(debug, ~"in interact cb for tcp::read_stop");
alt uv::ll::read_stop(stream_handle_ptr as *uv::ll::uv_stream_t) {
0i32 {
0i32 => {
log(debug, ~"successfully called uv_read_stop");
comm::send(stop_ch, none);
}
_ {
_ => {
log(debug, ~"failure in calling uv_read_stop");
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send(stop_ch, some(err_data.to_tcp_err()));
@ -928,12 +918,8 @@ fn read_stop_common_impl(socket_data: *tcp_socket_data) ->
}
};
alt comm::recv(stop_po) {
some(err_data) {
result::err(err_data.to_tcp_err())
}
none {
result::ok(())
}
some(err_data) => result::err(err_data.to_tcp_err()),
none => result::ok(())
}
}
@ -950,11 +936,11 @@ fn read_start_common_impl(socket_data: *tcp_socket_data)
alt uv::ll::read_start(stream_handle_ptr as *uv::ll::uv_stream_t,
on_alloc_cb,
on_tcp_read_cb) {
0i32 {
0i32 => {
log(debug, ~"success doing uv_read_start");
comm::send(start_ch, none);
}
_ {
_ => {
log(debug, ~"error attempting uv_read_start");
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send(start_ch, some(err_data));
@ -962,12 +948,8 @@ fn read_start_common_impl(socket_data: *tcp_socket_data)
}
};
alt comm::recv(start_po) {
some(err_data) {
result::err(err_data.to_tcp_err())
}
none {
result::ok((*socket_data).reader_po)
}
some(err_data) => result::err(err_data.to_tcp_err()),
none => result::ok((*socket_data).reader_po)
}
}
@ -995,11 +977,11 @@ fn write_common_impl(socket_data_ptr: *tcp_socket_data,
stream_handle_ptr,
write_buf_vec_ptr,
tcp_write_complete_cb) {
0i32 {
0i32 => {
log(debug, ~"uv_write() invoked successfully");
uv::ll::set_data_for_req(write_req_ptr, write_data_ptr);
}
_ {
_ => {
log(debug, ~"error invoking uv_write()");
let err_data = uv::ll::get_last_err_data(loop_ptr);
comm::send((*write_data_ptr).result_ch,
@ -1012,8 +994,8 @@ fn write_common_impl(socket_data_ptr: *tcp_socket_data,
// ownership of everything to the I/O task and let it deal with the
// aftermath, so we don't have to sit here blocking.
alt comm::recv(result_po) {
tcp_write_success { result::ok(()) }
tcp_write_error(err_data) { result::err(err_data.to_tcp_err()) }
tcp_write_success => result::ok(()),
tcp_write_error(err_data) => result::err(err_data.to_tcp_err())
}
}
@ -1043,10 +1025,8 @@ extern fn tcp_lfc_on_connection_cb(handle: *uv::ll::uv_tcp_t,
let kill_ch = (*server_data_ptr).kill_ch;
if (*server_data_ptr).active {
alt status {
0i32 {
(*server_data_ptr).on_connect_cb(handle);
}
_ {
0i32 => (*server_data_ptr).on_connect_cb(handle),
_ => {
let loop_ptr = uv::ll::get_loop_for_uv_handle(handle);
comm::send(kill_ch,
some(uv::ll::get_last_err_data(loop_ptr)
@ -1103,7 +1083,7 @@ extern fn on_tcp_read_cb(stream: *uv::ll::uv_stream_t,
as *tcp_socket_data;
alt nread as int {
// incoming err.. probably eof
-1 {
-1 => {
let err_data = uv::ll::get_last_err_data(loop_ptr).to_tcp_err();
log(debug, fmt!{"on_tcp_read_cb: incoming err.. name %? msg %?",
err_data.err_name, err_data.err_msg});
@ -1111,9 +1091,9 @@ extern fn on_tcp_read_cb(stream: *uv::ll::uv_stream_t,
comm::send(reader_ch, result::err(err_data));
}
// do nothing .. unneeded buf
0 {}
0 => (),
// have data
_ {
_ => {
// we have data
log(debug, fmt!{"tcp on_read_cb nread: %d", nread as int});
let reader_ch = (*socket_data_ptr).reader_ch;
@ -1196,11 +1176,11 @@ extern fn tcp_connect_on_connect_cb(connect_req_ptr: *uv::ll::uv_connect_t,
let tcp_stream_ptr =
uv::ll::get_stream_handle_from_connect_req(connect_req_ptr);
alt status {
0i32 {
0i32 => {
log(debug, ~"successful tcp connection!");
comm::send(result_ch, conn_success);
}
_ {
_ => {
log(debug, ~"error in tcp_connect_on_connect_cb");
let loop_ptr = uv::ll::get_loop_for_uv_handle(tcp_stream_ptr);
let err_data = uv::ll::get_last_err_data(loop_ptr);
@ -1357,11 +1337,8 @@ mod test {
hl_loop)
};
alt actual_resp_result.get_err() {
connection_refused {
}
_ {
fail ~"unknown error.. expected connection_refused"
}
connection_refused => (),
_ => fail ~"unknown error.. expected connection_refused"
}
}
fn impl_gl_tcp_ipv4_server_address_in_use() {
@ -1406,12 +1383,12 @@ mod test {
hl_loop)
};
alt listen_err {
address_in_use {
address_in_use => {
assert true;
}
_ {
_ => {
fail ~"expected address_in_use listen error,"+
~"but got a different error varient. check logs.";
~"but got a different error varient. check logs.";
}
}
}
@ -1425,10 +1402,10 @@ mod test {
server_port,
hl_loop);
alt listen_err {
access_denied {
access_denied => {
assert true;
}
_ {
_ => {
fail ~"expected address_in_use listen error,"+
~"but got a different error varient. check logs.";
}
@ -1539,7 +1516,7 @@ mod test {
~"connection!");
let received_req_bytes = read(sock, 0u);
alt received_req_bytes {
result::ok(data) {
result::ok(data) => {
log(debug, ~"SERVER: got REQ str::from_bytes..");
log(debug, fmt!{"SERVER: REQ data len: %?",
vec::len(data)});
@ -1550,7 +1527,7 @@ mod test {
log(debug, ~"SERVER: after write.. die");
comm::send(kill_ch, none);
}
result::err(err_data) {
result::err(err_data) => {
log(debug, fmt!{"SERVER: error recvd: %s %s",
err_data.err_name, err_data.err_msg});
comm::send(kill_ch, some(err_data));
@ -1568,14 +1545,14 @@ mod test {
// err check on listen_result
if result::is_err(listen_result) {
alt result::get_err(listen_result) {
generic_listen_err(name, msg) {
generic_listen_err(name, msg) => {
fail fmt!{"SERVER: exited abnormally name %s msg %s",
name, msg};
}
access_denied {
access_denied => {
fail ~"SERVER: exited abnormally, got access denied..";
}
address_in_use {
address_in_use => {
fail ~"SERVER: exited abnormally, got address in use...";
}
}

View file

@ -52,22 +52,22 @@ fn encode_inner(s: ~str, full_url: bool) -> ~str {
'A' to 'Z' |
'a' to 'z' |
'0' to '9' |
'-' | '.' | '_' | '~' {
'-' | '.' | '_' | '~' => {
str::push_char(out, ch);
}
_ {
if full_url {
_ => {
if full_url {
match ch {
// gen-delims:
':' | '/' | '?' | '#' | '[' | ']' | '@' |
// sub-delims:
'!' | '$' | '&' | '"' | '(' | ')' | '*' |
'+' | ',' | ';' | '=' {
'+' | ',' | ';' | '=' => {
str::push_char(out, ch);
}
_ { out += #fmt("%%%X", ch as uint); }
_ => out += #fmt("%%%X", ch as uint)
}
} else {
out += #fmt("%%%X", ch as uint);
@ -106,7 +106,7 @@ fn decode_inner(s: ~str, full_url: bool) -> ~str {
while !rdr.eof() {
match rdr.read_char() {
'%' {
'%' => {
let bytes = rdr.read_bytes(2u);
let ch = uint::parse_buf(bytes, 16u).get() as char;
@ -118,19 +118,19 @@ fn decode_inner(s: ~str, full_url: bool) -> ~str {
// sub-delims:
'!' | '$' | '&' | '"' | '(' | ')' | '*' |
'+' | ',' | ';' | '=' {
'+' | ',' | ';' | '=' => {
str::push_char(out, '%');
str::push_char(out, bytes[0u] as char);
str::push_char(out, bytes[1u] as char);
}
ch { str::push_char(out, ch); }
ch => str::push_char(out, ch)
}
} else {
str::push_char(out, ch);
}
}
ch { str::push_char(out, ch); }
ch => str::push_char(out, ch)
}
}
@ -161,11 +161,11 @@ fn encode_plus(s: ~str) -> ~str {
while !rdr.eof() {
let ch = rdr.read_byte() as char;
match ch {
'A' to 'Z' | 'a' to 'z' | '0' to '9' | '_' | '.' | '-' {
'A' to 'Z' | 'a' to 'z' | '0' to '9' | '_' | '.' | '-' => {
str::push_char(out, ch);
}
' ' { str::push_char(out, '+'); }
_ { out += #fmt("%%%X", ch as uint); }
' ' => str::push_char(out, '+'),
_ => out += #fmt("%%%X", ch as uint)
}
}
@ -212,11 +212,11 @@ fn decode_form_urlencoded(s: ~[u8]) ->
while !rdr.eof() {
match rdr.read_char() {
'&' | ';' {
'&' | ';' => {
if key != ~"" && value != ~"" {
let values = match m.find(key) {
some(values) { values }
none {
some(values) => values,
none => {
let values = @dvec();
m.insert(key, values);
values
@ -229,14 +229,14 @@ fn decode_form_urlencoded(s: ~[u8]) ->
key = ~"";
value = ~"";
}
'=' { parsing_key = false; }
ch {
'=' => parsing_key = false,
ch => {
let ch = match ch {
'%' {
'%' => {
uint::parse_buf(rdr.read_bytes(2u), 16u).get() as char
}
'+' { ' ' }
ch { ch }
'+' => ' ',
ch => ch
};
if parsing_key {
@ -250,8 +250,8 @@ fn decode_form_urlencoded(s: ~[u8]) ->
if key != ~"" && value != ~"" {
let values = match m.find(key) {
some(values) { values }
none {
some(values) => values,
none => {
let values = @dvec();
m.insert(key, values);
values
@ -333,14 +333,14 @@ fn query_to_str(query: query) -> ~str {
fn get_scheme(rawurl: ~str) -> result::result<(~str, ~str), @~str> {
for str::each_chari(rawurl) |i,c| {
match c {
'A' to 'Z' | 'a' to 'z' { again; }
'0' to '9' | '+' | '-' | '.' {
'A' to 'Z' | 'a' to 'z' => again,
'0' to '9' | '+' | '-' | '.' => {
if i == 0 {
return result::err(@~"url: Scheme must begin with a letter.");
}
again;
}
':' {
':' => {
if i == 0 {
return result::err(@~"url: Scheme cannot be empty.");
} else {
@ -348,7 +348,7 @@ fn get_scheme(rawurl: ~str) -> result::result<(~str, ~str), @~str> {
rawurl.slice(i+1,str::len(rawurl))));
}
}
_ {
_ => {
return result::err(@~"url: Invalid character in scheme.");
}
}
@ -393,34 +393,34 @@ fn get_authority(rawurl: ~str) ->
// deal with input class first
match c {
'0' to '9' { }
'A' to 'F' | 'a' to 'f' {
'0' to '9' => (),
'A' to 'F' | 'a' to 'f' => {
if in == digit {
in = hex;
}
}
'G' to 'Z' | 'g' to 'z' | '-' | '.' | '_' | '~' | '%' |
'&' |'\'' | '(' | ')' | '+' | '!' | '*' | ',' | ';' | '=' {
'&' |'\'' | '(' | ')' | '+' | '!' | '*' | ',' | ';' | '=' => {
in = unreserved;
}
':' | '@' | '?' | '#' | '/' {
':' | '@' | '?' | '#' | '/' => {
// separators, don't change anything
}
_ {
_ => {
return result::err(@~"Illegal character in authority");
}
}
// now process states
match c {
':' {
':' => {
colon_count += 1;
match st {
start {
start => {
pos = i;
st = pass_host_port;
}
pass_host_port {
pass_host_port => {
// multiple colons means ipv6 address.
if in == unreserved {
return result::err(
@ -428,7 +428,7 @@ fn get_authority(rawurl: ~str) ->
}
st = ip6_host;
}
in_host {
in_host => {
pos = i;
// can't be sure whether this is an ipv6 address or a port
if in == unreserved {
@ -436,55 +436,55 @@ fn get_authority(rawurl: ~str) ->
}
st = ip6_port;
}
ip6_port {
ip6_port => {
if in == unreserved {
return result::err(@~"Illegal characters in authority.");
}
st = ip6_host;
}
ip6_host {
ip6_host => {
if colon_count > 7 {
host = str::slice(rawurl, begin, i);
pos = i;
st = in_port;
}
}
_ {
_ => {
return result::err(@~"Invalid ':' in authority.");
}
}
in = digit; // reset input class
}
'@' {
'@' => {
in = digit; // reset input class
colon_count = 0; // reset count
match st {
start {
start => {
let user = str::slice(rawurl, begin, i);
userinfo = option::some({user : user,
pass: option::none});
st = in_host;
}
pass_host_port {
pass_host_port => {
let user = str::slice(rawurl, begin, pos);
let pass = str::slice(rawurl, pos+1, i);
userinfo = option::some({user: user,
pass: option::some(pass)});
st = in_host;
}
_ {
_ => {
return result::err(@~"Invalid '@' in authority.");
}
}
begin = i+1;
}
'?' | '#' | '/' {
'?' | '#' | '/' => {
end = i;
break;
}
_ { }
_ => ()
}
end = i;
}
@ -498,24 +498,24 @@ fn get_authority(rawurl: ~str) ->
// finish up
match st {
start {
start => {
if host_is_end_plus_one() {
host = str::slice(rawurl, begin, end+1);
} else {
host = str::slice(rawurl, begin, end);
}
}
pass_host_port | ip6_port {
pass_host_port | ip6_port => {
if in != digit {
return result::err(@~"Non-digit characters in port.");
}
host = str::slice(rawurl, begin, pos);
port = option::some(str::slice(rawurl, pos+1, end));
}
ip6_host | in_host {
ip6_host | in_host => {
host = str::slice(rawurl, begin, end);
}
in_port {
in_port => {
if in != digit {
return result::err(@~"Non-digit characters in port.");
}
@ -537,14 +537,14 @@ fn get_path(rawurl: ~str, authority : bool) ->
for str::each_chari(rawurl) |i,c| {
match c {
'A' to 'Z' | 'a' to 'z' | '0' to '9' | '&' |'\'' | '(' | ')' | '.'
| '@' | ':' | '%' | '/' | '+' | '!' | '*' | ',' | ';' | '=' {
| '@' | ':' | '%' | '/' | '+' | '!' | '*' | ',' | ';' | '=' => {
again;
}
'?' | '#' {
'?' | '#' => {
end = i;
break;
}
_ { return result::err(@~"Invalid character in path.") }
_ => return result::err(@~"Invalid character in path.")
}
}

View file

@ -134,13 +134,13 @@ fn prepend_str(rope: rope, str: @~str) -> rope {
/// Concatenate two ropes
fn append_rope(left: rope, right: rope) -> rope {
alt(left) {
node::empty { return right; }
node::content(left_content) {
node::empty => return right,
node::content(left_content) => {
alt(right) {
node::empty { return left; }
node::content(right_content) {
node::empty => return left,
node::content(right_content) => {
return node::content(node::concat2(left_content, right_content));
}
}
}
}
}
@ -198,12 +198,10 @@ Section: Keeping ropes healthy
*/
fn bal(rope:rope) -> rope {
alt(rope) {
node::empty { return rope }
node::content(x) {
alt(node::bal(x)) {
option::none { rope }
option::some(y) { node::content(y) }
}
node::empty => return rope,
node::content(x) => alt(node::bal(x)) {
option::none => rope,
option::some(y) => node::content(y)
}
}
}
@ -229,12 +227,11 @@ Section: Transforming ropes
fn sub_chars(rope: rope, char_offset: uint, char_len: uint) -> rope {
if char_len == 0u { return node::empty; }
alt(rope) {
node::empty { fail }
node::content(node) {
if char_len > node::char_len(node) { fail }
else {
return node::content(node::sub_chars(node, char_offset, char_len))
}
node::empty => fail,
node::content(node) => if char_len > node::char_len(node) {
fail
} else {
return node::content(node::sub_chars(node, char_offset, char_len))
}
}
}
@ -255,12 +252,11 @@ fn sub_chars(rope: rope, char_offset: uint, char_len: uint) -> rope {
fn sub_bytes(rope: rope, byte_offset: uint, byte_len: uint) -> rope {
if byte_len == 0u { return node::empty; }
alt(rope) {
node::empty { fail }
node::content(node) {
if byte_len > node::byte_len(node) { fail }
else {
return node::content(node::sub_bytes(node, byte_offset, byte_len))
}
node::empty => fail,
node::content(node) =>if byte_len > node::byte_len(node) {
fail
} else {
return node::content(node::sub_bytes(node, byte_offset, byte_len))
}
}
}
@ -281,10 +277,10 @@ Section: Comparing ropes
*/
fn cmp(left: rope, right: rope) -> int {
alt((left, right)) {
(node::empty, node::empty) { return 0; }
(node::empty, _) { return -1;}
(_, node::empty) { return 1;}
(node::content(a), node::content(b)) {
(node::empty, node::empty) => return 0,
(node::empty, _) => return -1,
(_, node::empty) => return 1,
(node::content(a), node::content(b)) => {
return node::cmp(a, b);
}
}
@ -384,8 +380,8 @@ Section: Iterating
*/
fn loop_chars(rope: rope, it: fn(char) -> bool) -> bool {
alt(rope) {
node::empty { return true }
node::content(x) { return node::loop_chars(x, it) }
node::empty => return true,
node::content(x) => return node::loop_chars(x, it)
}
}
@ -427,8 +423,8 @@ fn iter_chars(rope: rope, it: fn(char)) {
*/
fn loop_leaves(rope: rope, it: fn(node::leaf) -> bool) -> bool{
alt(rope) {
node::empty { return true }
node::content(x) {return node::loop_leaves(x, it)}
node::empty => return true,
node::content(x) => return node::loop_leaves(x, it)
}
}
@ -436,8 +432,8 @@ mod iterator {
mod leaf {
fn start(rope: rope) -> node::leaf_iterator::t {
alt(rope) {
node::empty { return node::leaf_iterator::empty() }
node::content(x) { return node::leaf_iterator::start(x) }
node::empty => return node::leaf_iterator::empty(),
node::content(x) => return node::leaf_iterator::start(x)
}
}
fn next(it: node::leaf_iterator::t) -> option<node::leaf> {
@ -447,8 +443,8 @@ mod iterator {
mod char {
fn start(rope: rope) -> node::char_iterator::t {
alt(rope) {
node::empty { return node::char_iterator::empty() }
node::content(x) { return node::char_iterator::start(x) }
node::empty => return node::char_iterator::empty(),
node::content(x) => return node::char_iterator::start(x)
}
}
fn next(it: node::char_iterator::t) -> option<char> {
@ -474,8 +470,8 @@ mod iterator {
*/
fn height(rope: rope) -> uint {
alt(rope) {
node::empty { return 0u; }
node::content(x) { return node::height(x); }
node::empty => return 0u,
node::content(x) => return node::height(x)
}
}
@ -490,8 +486,8 @@ fn height(rope: rope) -> uint {
*/
pure fn char_len(rope: rope) -> uint {
alt(rope) {
node::empty { return 0u; }
node::content(x) { return node::char_len(x) }
node::empty => return 0u,
node::content(x) => return node::char_len(x)
}
}
@ -504,8 +500,8 @@ pure fn char_len(rope: rope) -> uint {
*/
pure fn byte_len(rope: rope) -> uint {
alt(rope) {
node::empty { return 0u; }
node::content(x) { return node::byte_len(x) }
node::empty => return 0u,
node::content(x) => return node::byte_len(x)
}
}
@ -527,8 +523,8 @@ pure fn byte_len(rope: rope) -> uint {
*/
fn char_at(rope: rope, pos: uint) -> char {
alt(rope) {
node::empty { fail }
node::content(x) { return node::char_at(x, pos) }
node::empty => fail,
node::content(x) => return node::char_at(x, pos)
}
}
@ -735,15 +731,15 @@ mod node {
pure fn byte_len(node: @node) -> uint {
//FIXME (#2744): Could we do this without the pattern-matching?
alt(*node) {
leaf(y) { return y.byte_len; }
concat(y){ return y.byte_len; }
leaf(y) => return y.byte_len,
concat(y) => return y.byte_len
}
}
pure fn char_len(node: @node) -> uint {
alt(*node) {
leaf(y) { return y.char_len; }
concat(y) { return y.char_len; }
leaf(y) => return y.char_len,
concat(y) => return y.char_len
}
}
@ -805,8 +801,8 @@ mod node {
let it = leaf_iterator::start(node);
loop {
alt(leaf_iterator::next(it)) {
option::none { break; }
option::some(x) {
option::none => break,
option::some(x) => {
//FIXME (#2744): Replace with memcpy or something similar
let mut local_buf: ~[u8] =
unsafe::reinterpret_cast(*x.content);
@ -832,8 +828,8 @@ mod node {
*/
fn flatten(node: @node) -> @node unsafe {
alt(*node) {
leaf(_) { return node }
concat(x) {
leaf(_) => return node,
concat(x) => {
return @leaf({
byte_offset: 0u,
byte_len: x.byte_len,
@ -866,8 +862,8 @@ mod node {
let it = leaf_iterator::start(node);
loop {
alt (leaf_iterator::next(it)) {
option::none { break; }
option::some(x) { vec::push(forest, @leaf(x)); }
option::none => break,
option::some(x) => vec::push(forest, @leaf(x))
}
}
//2. Rebuild tree from forest
@ -903,7 +899,7 @@ mod node {
return node;
}
alt(*node) {
node::leaf(x) {
node::leaf(x) => {
let char_len =
str::count_chars(*x.content, byte_offset, byte_len);
return @leaf({byte_offset: byte_offset,
@ -911,7 +907,7 @@ mod node {
char_len: char_len,
content: x.content});
}
node::concat(x) {
node::concat(x) => {
let left_len: uint = node::byte_len(x.left);
if byte_offset <= left_len {
if byte_offset + byte_len <= left_len {
@ -961,7 +957,7 @@ mod node {
let mut char_offset = char_offset;
loop {
alt(*node) {
node::leaf(x) {
node::leaf(x) => {
if char_offset == 0u && char_len == x.char_len {
return node;
}
@ -974,7 +970,7 @@ mod node {
char_len: char_len,
content: x.content});
}
node::concat(x) {
node::concat(x) => {
if char_offset == 0u && char_len == x.char_len {return node;}
let left_len : uint = node::char_len(x.left);
if char_offset <= left_len {
@ -1012,8 +1008,8 @@ mod node {
fn height(node: @node) -> uint {
alt(*node) {
leaf(_) { return 0u; }
concat(x) { return x.height; }
leaf(_) => return 0u,
concat(x) => return x.height
}
}
@ -1023,16 +1019,14 @@ mod node {
let mut result = 0;
while result == 0 {
alt((char_iterator::next(ita), char_iterator::next(itb))) {
(option::none, option::none) {
break;
}
(option::some(chara), option::some(charb)) {
(option::none, option::none) => break,
(option::some(chara), option::some(charb)) => {
result = char::cmp(chara, charb);
}
(option::some(_), _) {
(option::some(_), _) => {
result = 1;
}
(_, option::some(_)) {
(_, option::some(_)) => {
result = -1;
}
}
@ -1066,15 +1060,11 @@ mod node {
let mut current = node;
loop {
alt(*current) {
leaf(x) {
return it(x);
}
concat(x) {
if loop_leaves(x.left, it) { //non tail call
current = x.right; //tail call
} else {
return false;
}
leaf(x) => return it(x),
concat(x) => if loop_leaves(x.left, it) { //non tail call
current = x.right; //tail call
} else {
return false;
}
}
};
@ -1102,10 +1092,8 @@ mod node {
let mut pos = pos;
loop {
alt *node {
leaf(x) {
return str::char_at(*x.content, pos);
}
concat({left, right, _}) {
leaf(x) => return str::char_at(*x.content, pos),
concat({left, right, _}) => {
let left_len = char_len(left);
node = if left_len > pos { left }
else { pos -= left_len; right };
@ -1139,15 +1127,13 @@ mod node {
let current = it.stack[it.stackpos];
it.stackpos -= 1;
alt(*current) {
concat(x) {
concat(x) => {
it.stackpos += 1;
it.stack[it.stackpos] = x.right;
it.stackpos += 1;
it.stack[it.stackpos] = x.left;
}
leaf(x) {
return option::some(x);
}
leaf(x) => return option::some(x)
}
};
}
@ -1179,16 +1165,12 @@ mod node {
fn next(it: t) -> option<char> {
loop {
alt(get_current_or_next_leaf(it)) {
option::none { return option::none; }
option::some(_) {
option::none => return option::none,
option::some(_) => {
let next_char = get_next_char_in_leaf(it);
alt(next_char) {
option::none {
again;
}
option::some(_) {
return next_char;
}
option::none => again,
option::some(_) => return next_char
}
}
}
@ -1197,12 +1179,12 @@ mod node {
fn get_current_or_next_leaf(it: t) -> option<leaf> {
alt(it.leaf) {
option::some(_) { return it.leaf }
option::none {
option::some(_) => return it.leaf,
option::none => {
let next = leaf_iterator::next(it.leaf_iterator);
alt(next) {
option::none { return option::none }
option::some(_) {
option::none => return option::none,
option::some(_) => {
it.leaf = next;
it.leaf_byte_pos = 0u;
return next;
@ -1214,8 +1196,8 @@ mod node {
fn get_next_char_in_leaf(it: t) -> option<char> {
alt copy it.leaf {
option::none { return option::none }
option::some(aleaf) {
option::none => return option::none,
option::some(aleaf) => {
if it.leaf_byte_pos >= aleaf.byte_len {
//We are actually past the end of the leaf
it.leaf = option::none;
@ -1239,17 +1221,17 @@ mod tests {
//Utility function, used for sanity check
fn rope_to_string(r: rope) -> ~str {
alt(r) {
node::empty { return ~"" }
node::content(x) {
node::empty => return ~"",
node::content(x) => {
let str = @mut ~"";
fn aux(str: @mut ~str, node: @node::node) unsafe {
alt(*node) {
node::leaf(x) {
node::leaf(x) => {
*str += str::slice(
*x.content, x.byte_offset,
x.byte_offset + x.byte_len);
}
node::concat(x) {
node::concat(x) => {
aux(str, x.left);
aux(str, x.right);
}
@ -1293,11 +1275,11 @@ mod tests {
let mut equal = true;
while equal {
alt(node::char_iterator::next(rope_iter)) {
option::none {
option::none => {
if string_iter < string_len {
equal = false;
} break; }
option::some(c) {
option::some(c) => {
let {ch, next} = str::char_range_at(*sample, string_iter);
string_iter = next;
if ch != c { equal = false; break; }
@ -1320,8 +1302,8 @@ mod tests {
let it = iterator::char::start(r);
loop {
alt(node::char_iterator::next(it)) {
option::none { break; }
option::some(_) { len += 1u; }
option::none => break,
option::some(_) => len += 1u
}
}

View file

@ -244,16 +244,12 @@ fn deserialize_bool<D: deserializer>(d: D) -> bool {
fn serialize_option<S: serializer,T>(s: S, v: option<T>, st: fn(T)) {
do s.emit_enum(~"option") {
alt v {
none {
do s.emit_enum_variant(~"none", 0u, 0u) {
}
none => do s.emit_enum_variant(~"none", 0u, 0u) {
}
some(v) {
do s.emit_enum_variant(~"some", 1u, 1u) {
do s.emit_enum_variant_arg(0u) {
st(v)
}
some(v) => do s.emit_enum_variant(~"some", 1u, 1u) {
do s.emit_enum_variant_arg(0u) {
st(v)
}
}
}
@ -265,14 +261,8 @@ fn deserialize_option<D: deserializer,T: copy>(d: D, st: fn() -> T)
do d.read_enum(~"option") {
do d.read_enum_variant |i| {
alt check i {
0u { // none
none
}
1u { // some(v)
some(d.read_enum_variant_arg(0u, || {
st()
}))
}
0u => none,
1u => some(d.read_enum_variant_arg(0u, || st() ))
}
}
}

View file

@ -49,8 +49,11 @@ pure fn find<T: copy>(self: smallintmap<T>, key: uint) -> option<T> {
*/
pure fn get<T: copy>(self: smallintmap<T>, key: uint) -> T {
alt find(self, key) {
none { error!{"smallintmap::get(): key not present"}; fail; }
some(v) { return v; }
none => {
error!{"smallintmap::get(): key not present"};
fail;
}
some(v) => return v
}
}
@ -64,7 +67,10 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
fn size() -> uint {
let mut sz = 0u;
for self.v.each |item| {
alt item { some(_) { sz += 1u; } _ {} }
alt item {
some(_) => sz += 1u,
_ => ()
}
}
sz
}
@ -98,10 +104,8 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
let mut idx = 0u, l = self.v.len();
while idx < l {
alt self.v.get_elt(idx) {
some(elt) {
if !it(idx, elt) { break; }
}
none { }
some(elt) => if !it(idx, elt) { break }
none => ()
}
idx += 1u;
}
@ -116,10 +120,8 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
let mut idx = 0u, l = self.v.len();
while idx < l {
alt self.v.get_elt(idx) {
some(elt) {
if !it(&idx, &elt) { break; }
}
none { }
some(elt) => if !it(&idx, &elt) { break }
none => ()
}
idx += 1u;
}

View file

@ -22,10 +22,10 @@ fn mkdtemp(prefix: ~str, suffix: ~str) -> option<~str> {
fn test_mkdtemp() {
let r = mkdtemp(~"./", ~"foobar");
alt r {
some(p) {
some(p) => {
os::remove_dir(p);
assert(str::ends_with(p, ~"foobar"));
}
_ { assert(false); }
_ => assert(false)
}
}

View file

@ -36,13 +36,13 @@ fn color_supported() -> bool {
let supported_terms = ~[~"xterm-color", ~"xterm",
~"screen-bce", ~"xterm-256color"];
return alt os::getenv(~"TERM") {
option::some(env) {
option::some(env) => {
for vec::each(supported_terms) |term| {
if term == env { return true; }
}
false
}
option::none { false }
option::none => false
};
}

View file

@ -53,8 +53,8 @@ type test_desc = {
fn test_main(args: ~[~str], tests: ~[test_desc]) {
let opts =
alt parse_opts(args) {
either::left(o) { o }
either::right(m) { fail m }
either::left(o) => o,
either::right(m) => fail m
};
if !run_tests_console(opts, tests) { fail ~"Some tests failed"; }
}
@ -70,8 +70,8 @@ fn parse_opts(args: ~[~str]) -> opt_res {
let opts = ~[getopts::optflag(~"ignored"), getopts::optopt(~"logfile")];
let matches =
alt getopts::getopts(args_, opts) {
ok(m) { m }
err(f) { return either::right(getopts::fail_str(f)) }
ok(m) => m,
err(f) => return either::right(getopts::fail_str(f))
};
let filter =
@ -106,32 +106,30 @@ fn run_tests_console(opts: test_opts,
fn callback(event: testevent, st: console_test_state) {
alt event {
te_filtered(filtered_tests) {
te_filtered(filtered_tests) => {
st.total = vec::len(filtered_tests);
let noun = if st.total != 1u { ~"tests" } else { ~"test" };
st.out.write_line(fmt!{"\nrunning %u %s", st.total, noun});
}
te_wait(test) { st.out.write_str(fmt!{"test %s ... ", test.name}); }
te_result(test, result) {
te_wait(test) => st.out.write_str(fmt!{"test %s ... ", test.name}),
te_result(test, result) => {
alt st.log_out {
some(f) {
write_log(f, result, test);
}
none {}
some(f) => write_log(f, result, test),
none => ()
}
alt result {
tr_ok {
tr_ok => {
st.passed += 1u;
write_ok(st.out, st.use_color);
st.out.write_line(~"");
}
tr_failed {
tr_failed => {
st.failed += 1u;
write_failed(st.out, st.use_color);
st.out.write_line(~"");
vec::push(st.failures, copy test);
}
tr_ignored {
tr_ignored => {
st.ignored += 1u;
write_ignored(st.out, st.use_color);
st.out.write_line(~"");
@ -142,15 +140,13 @@ fn run_tests_console(opts: test_opts,
}
let log_out = alt opts.logfile {
some(path) {
alt io::file_writer(path, ~[io::create, io::truncate]) {
result::ok(w) { some(w) }
result::err(s) {
fail(fmt!{"can't open output file: %s", s})
}
}
some(path) => alt io::file_writer(path, ~[io::create, io::truncate]) {
result::ok(w) => some(w),
result::err(s) => {
fail(fmt!{"can't open output file: %s", s})
}
}
none { none }
none => none
};
let st =
@ -185,9 +181,9 @@ fn run_tests_console(opts: test_opts,
fn write_log(out: io::writer, result: test_result, test: test_desc) {
out.write_line(fmt!{"%s %s",
alt result {
tr_ok { ~"ok" }
tr_failed { ~"failed" }
tr_ignored { ~"ignored" }
tr_ok => ~"ok",
tr_failed => ~"failed",
tr_ignored => ~"ignored"
}, test.name});
}
@ -339,8 +335,8 @@ fn filter_tests(opts: test_opts,
} else {
let filter_str =
alt opts.filter {
option::some(f) { f }
option::none { ~"" }
option::some(f) => f,
option::none => ~""
};
fn filter_fn(test: test_desc, filter_str: ~str) ->
@ -483,16 +479,20 @@ mod tests {
#[test]
fn first_free_arg_should_be_a_filter() {
let args = ~[~"progname", ~"filter"];
let opts = alt parse_opts(args) { either::left(o) { o }
_ { fail ~"Malformed arg in first_free_arg_should_be_a_filter"; } };
let opts = alt parse_opts(args) {
either::left(o) => o,
_ => fail ~"Malformed arg in first_free_arg_should_be_a_filter"
};
assert ~"filter" == option::get(opts.filter);
}
#[test]
fn parse_ignored_flag() {
let args = ~[~"progname", ~"filter", ~"--ignored"];
let opts = alt parse_opts(args) { either::left(o) { o }
_ { fail ~"Malformed arg in parse_ignored_flag"; } };
let opts = alt parse_opts(args) {
either::left(o) => o,
_ => fail ~"Malformed arg in parse_ignored_flag"
};
assert (opts.run_ignored);
}

View file

@ -182,11 +182,11 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
pos = next;
alt ch {
'0' to '9' {
'0' to '9' => {
value = value * 10_i32 + (ch as i32 - '0' as i32);
}
' ' if ws { }
_ { return none; }
' ' if ws => (),
_ => return none
}
i += 1u;
}
@ -209,83 +209,73 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
fn parse_type(s: ~str, pos: uint, ch: char, tm: tm_mut)
-> result<uint, ~str> {
alt ch {
'A' {
alt match_strs(s, pos, ~[
(~"Sunday", 0_i32),
(~"Monday", 1_i32),
(~"Tuesday", 2_i32),
(~"Wednesday", 3_i32),
(~"Thursday", 4_i32),
(~"Friday", 5_i32),
(~"Saturday", 6_i32)
]) {
some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none { err(~"Invalid day") }
}
'A' => alt match_strs(s, pos, ~[
(~"Sunday", 0_i32),
(~"Monday", 1_i32),
(~"Tuesday", 2_i32),
(~"Wednesday", 3_i32),
(~"Thursday", 4_i32),
(~"Friday", 5_i32),
(~"Saturday", 6_i32)
]) {
some(item) => { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none => err(~"Invalid day")
}
'a' {
alt match_strs(s, pos, ~[
(~"Sun", 0_i32),
(~"Mon", 1_i32),
(~"Tue", 2_i32),
(~"Wed", 3_i32),
(~"Thu", 4_i32),
(~"Fri", 5_i32),
(~"Sat", 6_i32)
]) {
some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none { err(~"Invalid day") }
}
'a' => alt match_strs(s, pos, ~[
(~"Sun", 0_i32),
(~"Mon", 1_i32),
(~"Tue", 2_i32),
(~"Wed", 3_i32),
(~"Thu", 4_i32),
(~"Fri", 5_i32),
(~"Sat", 6_i32)
]) {
some(item) => { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none => err(~"Invalid day")
}
'B' {
alt match_strs(s, pos, ~[
(~"January", 0_i32),
(~"February", 1_i32),
(~"March", 2_i32),
(~"April", 3_i32),
(~"May", 4_i32),
(~"June", 5_i32),
(~"July", 6_i32),
(~"August", 7_i32),
(~"September", 8_i32),
(~"October", 9_i32),
(~"November", 10_i32),
(~"December", 11_i32)
]) {
some(item) { let (v, pos) = item; tm.tm_mon = v; ok(pos) }
none { err(~"Invalid month") }
}
'B' => alt match_strs(s, pos, ~[
(~"January", 0_i32),
(~"February", 1_i32),
(~"March", 2_i32),
(~"April", 3_i32),
(~"May", 4_i32),
(~"June", 5_i32),
(~"July", 6_i32),
(~"August", 7_i32),
(~"September", 8_i32),
(~"October", 9_i32),
(~"November", 10_i32),
(~"December", 11_i32)
]) {
some(item) => { let (v, pos) = item; tm.tm_mon = v; ok(pos) }
none => err(~"Invalid month")
}
'b' | 'h' {
alt match_strs(s, pos, ~[
(~"Jan", 0_i32),
(~"Feb", 1_i32),
(~"Mar", 2_i32),
(~"Apr", 3_i32),
(~"May", 4_i32),
(~"Jun", 5_i32),
(~"Jul", 6_i32),
(~"Aug", 7_i32),
(~"Sep", 8_i32),
(~"Oct", 9_i32),
(~"Nov", 10_i32),
(~"Dec", 11_i32)
]) {
some(item) { let (v, pos) = item; tm.tm_mon = v; ok(pos) }
none { err(~"Invalid month") }
}
'b' | 'h' => alt match_strs(s, pos, ~[
(~"Jan", 0_i32),
(~"Feb", 1_i32),
(~"Mar", 2_i32),
(~"Apr", 3_i32),
(~"May", 4_i32),
(~"Jun", 5_i32),
(~"Jul", 6_i32),
(~"Aug", 7_i32),
(~"Sep", 8_i32),
(~"Oct", 9_i32),
(~"Nov", 10_i32),
(~"Dec", 11_i32)
]) {
some(item) => { let (v, pos) = item; tm.tm_mon = v; ok(pos) }
none => err(~"Invalid month")
}
'C' {
alt match_digits(s, pos, 2u, false) {
some(item) {
'C' => alt match_digits(s, pos, 2u, false) {
some(item) => {
let (v, pos) = item;
tm.tm_year += (v * 100_i32) - 1900_i32;
ok(pos)
tm.tm_year += (v * 100_i32) - 1900_i32;
ok(pos)
}
none { err(~"Invalid year") }
}
none => err(~"Invalid year")
}
'c' {
'c' => {
parse_type(s, pos, 'a', tm)
.chain(|pos| parse_char(s, pos, ' '))
.chain(|pos| parse_type(s, pos, 'b', tm))
@ -296,116 +286,108 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
.chain(|pos| parse_char(s, pos, ' '))
.chain(|pos| parse_type(s, pos, 'Y', tm))
}
'D' | 'x' {
'D' | 'x' => {
parse_type(s, pos, 'm', tm)
.chain(|pos| parse_char(s, pos, '/'))
.chain(|pos| parse_type(s, pos, 'd', tm))
.chain(|pos| parse_char(s, pos, '/'))
.chain(|pos| parse_type(s, pos, 'y', tm))
}
'd' {
alt match_digits(s, pos, 2u, false) {
some(item) { let (v, pos) = item; tm.tm_mday = v; ok(pos) }
none { err(~"Invalid day of the month") }
}
'd' => alt match_digits(s, pos, 2u, false) {
some(item) => { let (v, pos) = item; tm.tm_mday = v; ok(pos) }
none => err(~"Invalid day of the month")
}
'e' {
alt match_digits(s, pos, 2u, true) {
some(item) { let (v, pos) = item; tm.tm_mday = v; ok(pos) }
none { err(~"Invalid day of the month") }
}
'e' => alt match_digits(s, pos, 2u, true) {
some(item) => { let (v, pos) = item; tm.tm_mday = v; ok(pos) }
none => err(~"Invalid day of the month")
}
'F' {
'F' => {
parse_type(s, pos, 'Y', tm)
.chain(|pos| parse_char(s, pos, '-'))
.chain(|pos| parse_type(s, pos, 'm', tm))
.chain(|pos| parse_char(s, pos, '-'))
.chain(|pos| parse_type(s, pos, 'd', tm))
}
'H' {
'H' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, false) {
some(item) { let (v, pos) = item; tm.tm_hour = v; ok(pos) }
none { err(~"Invalid hour") }
some(item) => { let (v, pos) = item; tm.tm_hour = v; ok(pos) }
none => err(~"Invalid hour")
}
}
'I' {
'I' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_hour = if v == 12_i32 { 0_i32 } else { v };
ok(pos)
}
none { err(~"Invalid hour") }
none => err(~"Invalid hour")
}
}
'j' {
'j' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 3u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_yday = v - 1_i32;
ok(pos)
}
none { err(~"Invalid year") }
none => err(~"Invalid year")
}
}
'k' {
'k' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, true) {
some(item) { let (v, pos) = item; tm.tm_hour = v; ok(pos) }
none { err(~"Invalid hour") }
some(item) => { let (v, pos) = item; tm.tm_hour = v; ok(pos) }
none => err(~"Invalid hour")
}
}
'l' {
'l' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, true) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_hour = if v == 12_i32 { 0_i32 } else { v };
ok(pos)
}
none { err(~"Invalid hour") }
none => err(~"Invalid hour")
}
}
'M' {
'M' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, false) {
some(item) { let (v, pos) = item; tm.tm_min = v; ok(pos) }
none { err(~"Invalid minute") }
some(item) => { let (v, pos) = item; tm.tm_min = v; ok(pos) }
none => err(~"Invalid minute")
}
}
'm' {
'm' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_mon = v - 1_i32;
ok(pos)
}
none { err(~"Invalid month") }
none => err(~"Invalid month")
}
}
'n' { parse_char(s, pos, '\n') }
'P' {
alt match_strs(s, pos, ~[(~"am", 0_i32), (~"pm", 12_i32)]) {
some(item) { let (v, pos) = item; tm.tm_hour += v; ok(pos) }
none { err(~"Invalid hour") }
}
'n' => parse_char(s, pos, '\n'),
'P' => alt match_strs(s, pos, ~[(~"am", 0_i32), (~"pm", 12_i32)]) {
some(item) => { let (v, pos) = item; tm.tm_hour += v; ok(pos) }
none => err(~"Invalid hour")
}
'p' {
alt match_strs(s, pos, ~[(~"AM", 0_i32), (~"PM", 12_i32)]) {
some(item) { let (v, pos) = item; tm.tm_hour += v; ok(pos) }
none { err(~"Invalid hour") }
}
'p' => alt match_strs(s, pos, ~[(~"AM", 0_i32), (~"PM", 12_i32)]) {
some(item) => { let (v, pos) = item; tm.tm_hour += v; ok(pos) }
none => err(~"Invalid hour")
}
'R' {
'R' => {
parse_type(s, pos, 'H', tm)
.chain(|pos| parse_char(s, pos, ':'))
.chain(|pos| parse_type(s, pos, 'M', tm))
}
'r' {
'r' => {
parse_type(s, pos, 'I', tm)
.chain(|pos| parse_char(s, pos, ':'))
.chain(|pos| parse_type(s, pos, 'M', tm))
@ -414,38 +396,38 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
.chain(|pos| parse_char(s, pos, ' '))
.chain(|pos| parse_type(s, pos, 'p', tm))
}
'S' {
'S' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_sec = v;
ok(pos)
}
none { err(~"Invalid second") }
none => err(~"Invalid second")
}
}
//'s' {}
'T' | 'X' {
'T' | 'X' => {
parse_type(s, pos, 'H', tm)
.chain(|pos| parse_char(s, pos, ':'))
.chain(|pos| parse_type(s, pos, 'M', tm))
.chain(|pos| parse_char(s, pos, ':'))
.chain(|pos| parse_type(s, pos, 'S', tm))
}
't' { parse_char(s, pos, '\t') }
'u' {
't' => parse_char(s, pos, '\t'),
'u' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 1u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_wday = v;
ok(pos)
}
none { err(~"Invalid weekday") }
none => err(~"Invalid weekday")
}
}
'v' {
'v' => {
parse_type(s, pos, 'e', tm)
.chain(|pos| parse_char(s, pos, '-'))
.chain(|pos| parse_type(s, pos, 'b', tm))
@ -453,38 +435,38 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
.chain(|pos| parse_type(s, pos, 'Y', tm))
}
//'W' {}
'w' {
'w' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 1u, false) {
some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none { err(~"Invalid weekday") }
some(item) => { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none => err(~"Invalid weekday")
}
}
//'X' {}
//'x' {}
'Y' {
'Y' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 4u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_year = v - 1900_i32;
ok(pos)
}
none { err(~"Invalid weekday") }
none => err(~"Invalid weekday")
}
}
'y' {
'y' => {
// FIXME (#2350): range check.
alt match_digits(s, pos, 2u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
tm.tm_year = v - 1900_i32;
ok(pos)
}
none { err(~"Invalid weekday") }
none => err(~"Invalid weekday")
}
}
'Z' {
'Z' => {
if match_str(s, pos, ~"UTC") || match_str(s, pos, ~"GMT") {
tm.tm_gmtoff = 0_i32;
tm.tm_zone = ~"UTC";
@ -503,12 +485,12 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
ok(pos)
}
}
'z' {
'z' => {
let {ch, next} = str::char_range_at(s, pos);
if ch == '+' || ch == '-' {
alt match_digits(s, next, 4u, false) {
some(item) {
some(item) => {
let (v, pos) = item;
if v == 0_i32 {
tm.tm_gmtoff = 0_i32;
@ -517,14 +499,14 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
ok(pos)
}
none { err(~"Invalid zone offset") }
none => err(~"Invalid zone offset")
}
} else {
err(~"Invalid zone offset")
}
}
'%' { parse_char(s, pos, '%') }
ch {
'%' => parse_char(s, pos, '%'),
ch => {
err(fmt!{"unknown formatting type: %?", str::from_char(ch)})
}
}
@ -553,13 +535,11 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
let {ch, next} = str::char_range_at(s, pos);
alt rdr.read_char() {
'%' {
alt parse_type(s, pos, rdr.read_char(), tm) {
ok(next) { pos = next; }
err(e) { result = err(e); break; }
}
'%' => alt parse_type(s, pos, rdr.read_char(), tm) {
ok(next) => pos = next,
err(e) => { result = err(e); break; }
}
c {
c => {
if c != ch { break }
pos = next;
}
@ -589,62 +569,54 @@ fn strftime(format: ~str, tm: tm) -> ~str {
fn parse_type(ch: char, tm: tm) -> ~str {
//FIXME (#2350): Implement missing types.
alt check ch {
'A' {
alt check tm.tm_wday as int {
0 { ~"Sunday" }
1 { ~"Monday" }
2 { ~"Tuesday" }
3 { ~"Wednesday" }
4 { ~"Thursday" }
5 { ~"Friday" }
6 { ~"Saturday" }
}
'A' => alt check tm.tm_wday as int {
0 => ~"Sunday",
1 => ~"Monday",
2 => ~"Tuesday",
3 => ~"Wednesday",
4 => ~"Thursday",
5 => ~"Friday",
6 => ~"Saturday"
}
'a' {
alt check tm.tm_wday as int {
0 { ~"Sun" }
1 { ~"Mon" }
2 { ~"Tue" }
3 { ~"Wed" }
4 { ~"Thu" }
5 { ~"Fri" }
6 { ~"Sat" }
}
'a' => alt check tm.tm_wday as int {
0 => ~"Sun",
1 => ~"Mon",
2 => ~"Tue",
3 => ~"Wed",
4 => ~"Thu",
5 => ~"Fri",
6 => ~"Sat"
}
'B' {
alt check tm.tm_mon as int {
0 { ~"January" }
1 { ~"February" }
2 { ~"March" }
3 { ~"April" }
4 { ~"May" }
5 { ~"June" }
6 { ~"July" }
7 { ~"August" }
8 { ~"September" }
9 { ~"October" }
10 { ~"November" }
11 { ~"December" }
}
'B' => alt check tm.tm_mon as int {
0 => ~"January",
1 => ~"February",
2 => ~"March",
3 => ~"April",
4 => ~"May",
5 => ~"June",
6 => ~"July",
7 => ~"August",
8 => ~"September",
9 => ~"October",
10 => ~"November",
11 => ~"December"
}
'b' | 'h' {
alt check tm.tm_mon as int {
0 { ~"Jan" }
1 { ~"Feb" }
2 { ~"Mar" }
3 { ~"Apr" }
4 { ~"May" }
5 { ~"Jun" }
6 { ~"Jul" }
7 { ~"Aug" }
8 { ~"Sep" }
9 { ~"Oct" }
10 { ~"Nov" }
11 { ~"Dec" }
}
'b' | 'h' => alt check tm.tm_mon as int {
0 => ~"Jan",
1 => ~"Feb",
2 => ~"Mar",
3 => ~"Apr",
4 => ~"May",
5 => ~"Jun",
6 => ~"Jul",
7 => ~"Aug",
8 => ~"Sep",
9 => ~"Oct",
10 => ~"Nov",
11 => ~"Dec",
}
'C' { fmt!{"%02d", (tm.tm_year as int + 1900) / 100} }
'c' {
'C' => fmt!{"%02d", (tm.tm_year as int + 1900) / 100},
'c' => {
fmt!{"%s %s %s %s %s",
parse_type('a', tm),
parse_type('b', tm),
@ -652,15 +624,15 @@ fn strftime(format: ~str, tm: tm) -> ~str {
parse_type('T', tm),
parse_type('Y', tm)}
}
'D' | 'x' {
'D' | 'x' => {
fmt!{"%s/%s/%s",
parse_type('m', tm),
parse_type('d', tm),
parse_type('y', tm)}
}
'd' { fmt!{"%02d", tm.tm_mday as int} }
'e' { fmt!{"%2d", tm.tm_mday as int} }
'F' {
'd' => fmt!{"%02d", tm.tm_mday as int},
'e' => fmt!{"%2d", tm.tm_mday as int},
'F' => {
fmt!{"%s-%s-%s",
parse_type('Y', tm),
parse_type('m', tm),
@ -668,67 +640,67 @@ fn strftime(format: ~str, tm: tm) -> ~str {
}
//'G' {}
//'g' {}
'H' { fmt!{"%02d", tm.tm_hour as int} }
'I' {
'H' => fmt!{"%02d", tm.tm_hour as int},
'I' => {
let mut h = tm.tm_hour as int;
if h == 0 { h = 12 }
if h > 12 { h -= 12 }
fmt!{"%02d", h}
}
'j' { fmt!{"%03d", tm.tm_yday as int + 1} }
'k' { fmt!{"%2d", tm.tm_hour as int} }
'l' {
'j' => fmt!{"%03d", tm.tm_yday as int + 1},
'k' => fmt!{"%2d", tm.tm_hour as int},
'l' => {
let mut h = tm.tm_hour as int;
if h == 0 { h = 12 }
if h > 12 { h -= 12 }
fmt!{"%2d", h}
}
'M' { fmt!{"%02d", tm.tm_min as int} }
'm' { fmt!{"%02d", tm.tm_mon as int + 1} }
'n' { ~"\n" }
'P' { if tm.tm_hour as int < 12 { ~"am" } else { ~"pm" } }
'p' { if tm.tm_hour as int < 12 { ~"AM" } else { ~"PM" } }
'R' {
'M' => fmt!{"%02d", tm.tm_min as int},
'm' => fmt!{"%02d", tm.tm_mon as int + 1},
'n' => ~"\n",
'P' => if tm.tm_hour as int < 12 { ~"am" } else { ~"pm" }
'p' => if tm.tm_hour as int < 12 { ~"AM" } else { ~"PM" }
'R' => {
fmt!{"%s:%s",
parse_type('H', tm),
parse_type('M', tm)}
}
'r' {
'r' => {
fmt!{"%s:%s:%s %s",
parse_type('I', tm),
parse_type('M', tm),
parse_type('S', tm),
parse_type('p', tm)}
}
'S' { fmt!{"%02d", tm.tm_sec as int} }
's' { fmt!{"%d", tm.to_timespec().sec as int} }
'T' | 'X' {
'S' => fmt!{"%02d", tm.tm_sec as int},
's' => fmt!{"%d", tm.to_timespec().sec as int},
'T' | 'X' => {
fmt!{"%s:%s:%s",
parse_type('H', tm),
parse_type('M', tm),
parse_type('S', tm)}
}
't' { ~"\t" }
't' => ~"\t",
//'U' {}
'u' {
'u' => {
let i = tm.tm_wday as int;
int::str(if i == 0 { 7 } else { i })
}
//'V' {}
'v' {
'v' => {
fmt!{"%s-%s-%s",
parse_type('e', tm),
parse_type('b', tm),
parse_type('Y', tm)}
}
//'W' {}
'w' { int::str(tm.tm_wday as int) }
'w' => int::str(tm.tm_wday as int),
//'X' {}
//'x' {}
'Y' { int::str(tm.tm_year as int + 1900) }
'y' { fmt!{"%02d", (tm.tm_year as int + 1900) % 100} }
'Z' { tm.tm_zone }
'z' {
'Y' => int::str(tm.tm_year as int + 1900),
'y' => fmt!{"%02d", (tm.tm_year as int + 1900) % 100},
'Z' => tm.tm_zone,
'z' => {
let sign = if tm.tm_gmtoff > 0_i32 { '+' } else { '-' };
let mut m = i32::abs(tm.tm_gmtoff) / 60_i32;
let h = m / 60_i32;
@ -736,7 +708,7 @@ fn strftime(format: ~str, tm: tm) -> ~str {
fmt!{"%c%02d%02d", sign, h as int, m as int}
}
//'+' {}
'%' { ~"%" }
'%' => ~"%"
}
}
@ -745,8 +717,8 @@ fn strftime(format: ~str, tm: tm) -> ~str {
do io::with_str_reader(format) |rdr| {
while !rdr.eof() {
alt rdr.read_char() {
'%' { buf += parse_type(rdr.read_char(), tm); }
ch { str::push_char(buf, ch); }
'%' => buf += parse_type(rdr.read_char(), tm),
ch => str::push_char(buf, ch)
}
}
}
@ -961,7 +933,7 @@ mod tests {
tzset();
alt strptime(~"", ~"") {
ok(tm) {
ok(tm) => {
assert tm.tm_sec == 0_i32;
assert tm.tm_min == 0_i32;
assert tm.tm_hour == 0_i32;
@ -974,7 +946,7 @@ mod tests {
assert tm.tm_zone == ~"";
assert tm.tm_nsec == 0_i32;
}
err(_) {}
err(_) => ()
}
let format = ~"%a %b %e %T %Y";
@ -983,8 +955,8 @@ mod tests {
== err(~"Invalid time");
alt strptime(~"Fri Feb 13 15:31:30 2009", format) {
err(e) { fail e }
ok(tm) {
err(e) => fail e,
ok(tm) => {
assert tm.tm_sec == 30_i32;
assert tm.tm_min == 31_i32;
assert tm.tm_hour == 15_i32;
@ -1002,8 +974,8 @@ mod tests {
fn test(s: ~str, format: ~str) -> bool {
alt strptime(s, format) {
ok(tm) { tm.strftime(format) == s }
err(e) { fail e }
ok(tm) => tm.strftime(format) == s,
err(e) => fail e
}
}

View file

@ -216,8 +216,11 @@ mod test {
};
alt recv_timeout(hl_loop, 10u, test_po) {
some(val) { assert val == expected; successes += 1; }
_ { failures += 1; }
some(val) => {
assert val == expected;
successes += 1;
}
_ => failures += 1
};
}
@ -241,8 +244,8 @@ mod test {
};
alt recv_timeout(hl_loop, 1u, test_po) {
none { successes += 1; }
_ { failures += 1; }
none => successes += 1,
_ => failures += 1
};
}

View file

@ -31,14 +31,14 @@ fn treemap<K, V>() -> treemap<K, V> { @mut none }
/// Insert a value into the map
fn insert<K: copy, V: copy>(m: &mut tree_edge<K, V>, k: K, v: V) {
alt copy *m {
none {
none => {
*m = some(@tree_node({key: k,
mut value: v,
mut left: none,
mut right: none}));
return;
}
some(node) {
some(node) => {
if k == node.key {
node.value = v;
} else if k < node.key {
@ -53,10 +53,10 @@ fn insert<K: copy, V: copy>(m: &mut tree_edge<K, V>, k: K, v: V) {
/// Find a value based on the key
fn find<K: copy, V: copy>(m: &const tree_edge<K, V>, k: K) -> option<V> {
alt copy *m {
none { none }
none => none,
// FIXME (#2808): was that an optimization?
some(node) {
some(node) => {
if k == node.key {
some(node.value)
} else if k < node.key {
@ -71,8 +71,8 @@ fn find<K: copy, V: copy>(m: &const tree_edge<K, V>, k: K) -> option<V> {
/// Visit all pairs in the map in order.
fn traverse<K, V: copy>(m: &const tree_edge<K, V>, f: fn(K, V)) {
alt copy *m {
none { }
some(node) {
none => (),
some(node) => {
traverse(&const node.left, f);
// copy of value is req'd as f() requires an immutable ptr
f(node.key, copy node.value);

View file

@ -57,14 +57,14 @@ fn get_monitor_task_gl() -> iotask unsafe {
loop {
debug!{"in outer_loop..."};
alt select2(weak_exit_po, msg_po) {
left(weak_exit) {
left(weak_exit) => {
// all normal tasks have ended, tell the
// libuv loop to tear_down, then exit
debug!{"weak_exit_po recv'd msg: %?", weak_exit};
iotask::exit(hl_loop);
break;
}
right(fetch_ch) {
right(fetch_ch) => {
debug!{"hl_loop req recv'd: %?", fetch_ch};
fetch_ch.send(hl_loop);
}

View file

@ -145,12 +145,8 @@ extern fn wake_up_cb(async_handle: *ll::uv_async_t,
while msg_po.peek() {
alt msg_po.recv() {
interaction(cb) {
cb(loop_ptr);
}
teardown_loop {
begin_teardown(data);
}
interaction(cb) => cb(loop_ptr),
teardown_loop => begin_teardown(data)
}
}
}

View file

@ -849,12 +849,8 @@ unsafe fn ip6_name(src: &sockaddr_in6) -> ~str {
let result = rustrt::rust_uv_ip6_name(src_unsafe_ptr,
dst_buf, size as libc::size_t);
alt result {
0i32 {
str::unsafe::from_buf(dst_buf)
}
_ {
~""
}
0i32 => str::unsafe::from_buf(dst_buf),
_ => ~""
}
}
}

View file

@ -194,8 +194,8 @@ enum vstore {
pure fn is_blockish(p: ast::proto) -> bool {
alt p {
proto_block { true }
proto_bare | proto_uniq | proto_box { false }
proto_block => true,
proto_bare | proto_uniq | proto_box => false
}
}

View file

@ -13,8 +13,8 @@ type path = ~[path_elt];
fn path_to_str_with_sep(p: path, sep: ~str) -> ~str {
let strs = do vec::map(p) |e| {
alt e {
path_mod(s) { /* FIXME (#2543) */ copy *s }
path_name(s) { /* FIXME (#2543) */ copy *s }
path_mod(s) => /* FIXME (#2543) */ copy *s,
path_name(s) => /* FIXME (#2543) */ copy *s
}
};
str::connect(strs, sep)
@ -105,12 +105,12 @@ fn map_decoded_item(diag: span_handler,
// don't decode and instantiate the impl, but just the method, we have to
// add it to the table now:
alt ii {
ii_item(*) | ii_ctor(*) | ii_dtor(*) { /* fallthrough */ }
ii_foreign(i) {
ii_item(*) | ii_ctor(*) | ii_dtor(*) => { /* fallthrough */ }
ii_foreign(i) => {
cx.map.insert(i.id, node_foreign_item(i, foreign_abi_rust_intrinsic,
@path));
}
ii_method(impl_did, m) {
ii_method(impl_did, m) => {
map_method(impl_did, @path, m, cx);
}
}
@ -128,7 +128,7 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
cx.local_id += 1u;
}
alt fk {
visit::fk_ctor(nm, attrs, tps, self_id, parent_id) {
visit::fk_ctor(nm, attrs, tps, self_id, parent_id) => {
let ct = @{node: {id: id,
attrs: attrs,
self_id: self_id,
@ -140,14 +140,14 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
ct, parent_id,
@/* FIXME (#2543) */ copy cx.path));
}
visit::fk_dtor(tps, attrs, self_id, parent_id) {
visit::fk_dtor(tps, attrs, self_id, parent_id) => {
let dt = @{node: {id: id, attrs: attrs, self_id: self_id,
body: /* FIXME (#2543) */ copy body}, span: sp};
cx.map.insert(id, node_dtor(/* FIXME (#2543) */ copy tps, dt,
parent_id,
@/* FIXME (#2543) */ copy cx.path));
}
_ {}
_ => ()
}
visit::visit_fn(fk, decl, body, sp, id, cx, v);
}
@ -160,11 +160,11 @@ fn map_block(b: blk, cx: ctx, v: vt) {
fn number_pat(cx: ctx, pat: @pat) {
do ast_util::walk_pat(pat) |p| {
alt p.node {
pat_ident(*) {
pat_ident(*) => {
cx.map.insert(p.id, node_local(cx.local_id));
cx.local_id += 1u;
}
_ {}
_ => ()
}
};
}
@ -190,24 +190,24 @@ fn map_item(i: @item, cx: ctx, v: vt) {
let item_path = @/* FIXME (#2543) */ copy cx.path;
cx.map.insert(i.id, node_item(i, item_path));
alt i.node {
item_impl(_, opt_ir, _, ms) {
item_impl(_, opt_ir, _, ms) => {
let impl_did = ast_util::local_def(i.id);
for ms.each |m| {
map_method(impl_did, extend(cx, i.ident), m,
cx);
}
}
item_enum(vs, _) {
item_enum(vs, _) => {
for vs.each |v| {
cx.map.insert(v.node.id, node_variant(
/* FIXME (#2543) */ copy v, i,
extend(cx, i.ident)));
}
}
item_foreign_mod(nm) {
item_foreign_mod(nm) => {
let abi = alt attr::foreign_abi(i.attrs) {
either::left(msg) { cx.diag.span_fatal(i.span, msg); }
either::right(abi) { abi }
either::left(msg) => cx.diag.span_fatal(i.span, msg),
either::right(abi) => abi
};
for nm.items.each |nitem| {
cx.map.insert(nitem.id,
@ -216,7 +216,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
extend(cx, i.ident)));
}
}
item_class(tps, traits, items, ctor, dtor) {
item_class(tps, traits, items, ctor, dtor) => {
let (_, ms) = ast_util::split_class_items(items);
// Map trait refs to their parent classes. This is
// so we can find the self_ty
@ -231,7 +231,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
// only need to handle methods
do vec::iter(ms) |m| { map_method(d_id, p, m, cx); }
}
item_trait(tps, traits, methods) {
item_trait(tps, traits, methods) => {
// Map trait refs to their parent classes. This is
// so we can find the self_ty
for traits.each |p| {
@ -246,13 +246,13 @@ fn map_item(i: @item, cx: ctx, v: vt) {
cx.map.insert(id, node_trait_method(@tm, d_id, item_path));
}
}
_ { }
_ => ()
}
alt i.node {
item_mod(_) | item_foreign_mod(_) {
item_mod(_) | item_foreign_mod(_) => {
vec::push(cx.path, path_mod(i.ident));
}
_ { vec::push(cx.path, path_name(i.ident)); }
_ => vec::push(cx.path, path_name(i.ident))
}
visit::visit_item(i, cx, v);
vec::pop(cx.path);
@ -260,20 +260,18 @@ fn map_item(i: @item, cx: ctx, v: vt) {
fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
alt vi.node {
view_item_export(vps) {
for vps.each |vp| {
let (id, name) = alt vp.node {
view_path_simple(nm, _, id) {
(id, /* FIXME (#2543) */ copy nm)
}
view_path_glob(pth, id) | view_path_list(pth, _, id) {
(id, path_to_ident(pth))
}
};
cx.map.insert(id, node_export(vp, extend(cx, name)));
}
view_item_export(vps) => for vps.each |vp| {
let (id, name) = alt vp.node {
view_path_simple(nm, _, id) => {
(id, /* FIXME (#2543) */ copy nm)
}
view_path_glob(pth, id) | view_path_list(pth, _, id) => {
(id, path_to_ident(pth))
}
};
cx.map.insert(id, node_export(vp, extend(cx, name)));
}
_ {}
_ => ()
}
}
@ -284,51 +282,51 @@ fn map_expr(ex: @expr, cx: ctx, v: vt) {
fn node_id_to_str(map: map, id: node_id) -> ~str {
alt map.find(id) {
none {
none => {
fmt!{"unknown node (id=%d)", id}
}
some(node_item(item, path)) {
some(node_item(item, path)) => {
fmt!{"item %s (id=%?)", path_ident_to_str(*path, item.ident), id}
}
some(node_foreign_item(item, abi, path)) {
some(node_foreign_item(item, abi, path)) => {
fmt!{"foreign item %s with abi %? (id=%?)",
path_ident_to_str(*path, item.ident), abi, id}
}
some(node_method(m, impl_did, path)) {
some(node_method(m, impl_did, path)) => {
fmt!{"method %s in %s (id=%?)",
*m.ident, path_to_str(*path), id}
}
some(node_trait_method(tm, impl_did, path)) {
some(node_trait_method(tm, impl_did, path)) => {
let m = ast_util::trait_method_to_ty_method(*tm);
fmt!{"method %s in %s (id=%?)",
*m.ident, path_to_str(*path), id}
}
some(node_variant(variant, def_id, path)) {
some(node_variant(variant, def_id, path)) => {
fmt!{"variant %s in %s (id=%?)",
*variant.node.name, path_to_str(*path), id}
}
some(node_expr(expr)) {
some(node_expr(expr)) => {
fmt!{"expr %s (id=%?)",
pprust::expr_to_str(expr), id}
}
// FIXMEs are as per #2410
some(node_export(_, path)) {
some(node_export(_, path)) => {
fmt!{"export %s (id=%?)", // add more info here
path_to_str(*path), id}
}
some(node_arg(_, _)) { // add more info here
some(node_arg(_, _)) => { // add more info here
fmt!{"arg (id=%?)", id}
}
some(node_local(_)) { // add more info here
some(node_local(_)) => { // add more info here
fmt!{"local (id=%?)", id}
}
some(node_ctor(*)) { // add more info here
some(node_ctor(*)) => { // add more info here
fmt!{"node_ctor (id=%?)", id}
}
some(node_dtor(*)) { // add more info here
some(node_dtor(*)) => { // add more info here
fmt!{"node_dtor (id=%?)", id}
}
some(node_block(_)) {
some(node_block(_)) => {
fmt!{"block"}
}
}

View file

@ -36,16 +36,19 @@ pure fn is_local(did: ast::def_id) -> bool { did.crate == local_crate }
pure fn stmt_id(s: stmt) -> node_id {
alt s.node {
stmt_decl(_, id) { id }
stmt_expr(_, id) { id }
stmt_semi(_, id) { id }
stmt_decl(_, id) => id,
stmt_expr(_, id) => id,
stmt_semi(_, id) => id
}
}
fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
alt d { def_variant(enum_id, var_id) {
return {enm: enum_id, var: var_id}; }
_ { fail ~"non-variant in variant_def_ids"; } }
alt d {
def_variant(enum_id, var_id) => {
return {enm: enum_id, var: var_id}
}
_ => fail ~"non-variant in variant_def_ids"
}
}
pure fn def_id_of_def(d: def) -> def_id {
@ -53,117 +56,129 @@ pure fn def_id_of_def(d: def) -> def_id {
def_fn(id, _) | def_mod(id) |
def_foreign_mod(id) | def_const(id) |
def_variant(_, id) | def_ty(id) | def_ty_param(id, _) |
def_use(id) | def_class(id, _) { id }
def_use(id) | def_class(id, _) => {
id
}
def_arg(id, _) | def_local(id, _) | def_self(id) |
def_upvar(id, _, _) | def_binding(id, _) | def_region(id)
| def_typaram_binder(id) {
| def_typaram_binder(id) => {
local_def(id)
}
def_prim_ty(_) { fail; }
def_prim_ty(_) => fail
}
}
pure fn binop_to_str(op: binop) -> ~str {
alt op {
add { return ~"+"; }
subtract { return ~"-"; }
mul { return ~"*"; }
div { return ~"/"; }
rem { return ~"%"; }
and { return ~"&&"; }
or { return ~"||"; }
bitxor { return ~"^"; }
bitand { return ~"&"; }
bitor { return ~"|"; }
shl { return ~"<<"; }
shr { return ~">>"; }
eq { return ~"=="; }
lt { return ~"<"; }
le { return ~"<="; }
ne { return ~"!="; }
ge { return ~">="; }
gt { return ~">"; }
add => return ~"+",
subtract => return ~"-",
mul => return ~"*",
div => return ~"/",
rem => return ~"%",
and => return ~"&&",
or => return ~"||",
bitxor => return ~"^",
bitand => return ~"&",
bitor => return ~"|",
shl => return ~"<<",
shr => return ~">>",
eq => return ~"==",
lt => return ~"<",
le => return ~"<=",
ne => return ~"!=",
ge => return ~">=",
gt => return ~">"
}
}
pure fn binop_to_method_name(op: binop) -> option<~str> {
alt op {
add { return some(~"add"); }
subtract { return some(~"sub"); }
mul { return some(~"mul"); }
div { return some(~"div"); }
rem { return some(~"modulo"); }
bitxor { return some(~"bitxor"); }
bitand { return some(~"bitand"); }
bitor { return some(~"bitor"); }
shl { return some(~"shl"); }
shr { return some(~"shr"); }
and | or | eq | lt | le | ne | ge | gt { return none; }
add => return some(~"add"),
subtract => return some(~"sub"),
mul => return some(~"mul"),
div => return some(~"div"),
rem => return some(~"modulo"),
bitxor => return some(~"bitxor"),
bitand => return some(~"bitand"),
bitor => return some(~"bitor"),
shl => return some(~"shl"),
shr => return some(~"shr"),
and | or | eq | lt | le | ne | ge | gt => return none
}
}
pure fn lazy_binop(b: binop) -> bool {
alt b { and { true } or { true } _ { false } }
alt b {
and => true,
or => true,
_ => false
}
}
pure fn is_shift_binop(b: binop) -> bool {
alt b {
shl { true }
shr { true }
_ { false }
shl => true,
shr => true,
_ => false
}
}
pure fn unop_to_str(op: unop) -> ~str {
alt op {
box(mt) { if mt == m_mutbl { ~"@mut " } else { ~"@" } }
uniq(mt) { if mt == m_mutbl { ~"~mut " } else { ~"~" } }
deref { ~"*" }
not { ~"!" }
neg { ~"-" }
box(mt) => if mt == m_mutbl { ~"@mut " } else { ~"@" },
uniq(mt) => if mt == m_mutbl { ~"~mut " } else { ~"~" },
deref => ~"*",
not => ~"!",
neg => ~"-"
}
}
pure fn is_path(e: @expr) -> bool {
return alt e.node { expr_path(_) { true } _ { false } };
return alt e.node { expr_path(_) => true, _ => false };
}
pure fn int_ty_to_str(t: int_ty) -> ~str {
alt t {
ty_char { ~"u8" } // ???
ty_i { ~"" } ty_i8 { ~"i8" } ty_i16 { ~"i16" }
ty_i32 { ~"i32" } ty_i64 { ~"i64" }
ty_char => ~"u8", // ???
ty_i => ~"",
ty_i8 => ~"i8",
ty_i16 => ~"i16",
ty_i32 => ~"i32",
ty_i64 => ~"i64"
}
}
pure fn int_ty_max(t: int_ty) -> u64 {
alt t {
ty_i8 { 0x80u64 }
ty_i16 { 0x8000u64 }
ty_i | ty_char | ty_i32 { 0x80000000u64 } // actually ni about ty_i
ty_i64 { 0x8000000000000000u64 }
ty_i8 => 0x80u64,
ty_i16 => 0x8000u64,
ty_i | ty_char | ty_i32 => 0x80000000u64, // actually ni about ty_i
ty_i64 => 0x8000000000000000u64
}
}
pure fn uint_ty_to_str(t: uint_ty) -> ~str {
alt t {
ty_u { ~"u" } ty_u8 { ~"u8" } ty_u16 { ~"u16" }
ty_u32 { ~"u32" } ty_u64 { ~"u64" }
ty_u => ~"u",
ty_u8 => ~"u8",
ty_u16 => ~"u16",
ty_u32 => ~"u32",
ty_u64 => ~"u64"
}
}
pure fn uint_ty_max(t: uint_ty) -> u64 {
alt t {
ty_u8 { 0xffu64 }
ty_u16 { 0xffffu64 }
ty_u | ty_u32 { 0xffffffffu64 } // actually ni about ty_u
ty_u64 { 0xffffffffffffffffu64 }
ty_u8 => 0xffu64,
ty_u16 => 0xffffu64,
ty_u | ty_u32 => 0xffffffffu64, // actually ni about ty_u
ty_u64 => 0xffffffffffffffffu64
}
}
pure fn float_ty_to_str(t: float_ty) -> ~str {
alt t { ty_f { ~"f" } ty_f32 { ~"f32" } ty_f64 { ~"f64" } }
alt t { ty_f => ~"f", ty_f32 => ~"f32", ty_f64 => ~"f64" }
}
fn is_exported(i: ident, m: _mod) -> bool {
@ -172,36 +187,34 @@ fn is_exported(i: ident, m: _mod) -> bool {
for m.items.each |it| {
if it.ident == i { local = true; }
alt it.node {
item_enum(variants, _) {
for variants.each |v| {
if v.node.name == i {
local = true;
parent_enum = some(/* FIXME (#2543) */ copy it.ident);
}
item_enum(variants, _) => for variants.each |v| {
if v.node.name == i {
local = true;
parent_enum = some(/* FIXME (#2543) */ copy it.ident);
}
}
_ { }
_ => ()
}
if local { break; }
}
let mut has_explicit_exports = false;
for m.view_items.each |vi| {
alt vi.node {
view_item_export(vps) {
view_item_export(vps) => {
has_explicit_exports = true;
for vps.each |vp| {
alt vp.node {
ast::view_path_simple(id, _, _) {
ast::view_path_simple(id, _, _) => {
if id == i { return true; }
alt parent_enum {
some(parent_enum_id) {
some(parent_enum_id) => {
if id == parent_enum_id { return true; }
}
_ {}
_ => ()
}
}
ast::view_path_list(path, ids, _) {
ast::view_path_list(path, ids, _) => {
if vec::len(path.idents) == 1u {
if i == path.idents[0] { return true; }
for ids.each |id| {
@ -213,11 +226,11 @@ fn is_exported(i: ident, m: _mod) -> bool {
}
// FIXME: glob-exports aren't supported yet. (#2006)
_ {}
_ => ()
}
}
}
_ {}
_ => ()
}
}
// If there are no declared exports then
@ -227,7 +240,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
}
pure fn is_call_expr(e: @expr) -> bool {
alt e.node { expr_call(_, _, _) { true } _ { false } }
alt e.node { expr_call(_, _, _) => true, _ => false }
}
pure fn eq_ty(a: &@ty, b: &@ty) -> bool { box::ptr_eq(*a, *b) }
@ -272,8 +285,8 @@ fn ident_to_path(s: span, +i: ident) -> @path {
pure fn is_unguarded(&&a: arm) -> bool {
alt a.guard {
none { true }
_ { false }
none => true,
_ => false
}
}
@ -283,8 +296,8 @@ pure fn unguarded_pat(a: arm) -> option<~[@pat]> {
pure fn class_item_ident(ci: @class_member) -> ident {
alt ci.node {
instance_var(i,_,_,_,_) { /* FIXME (#2543) */ copy i }
class_method(it) { /* FIXME (#2543) */ copy it.ident }
instance_var(i,_,_,_,_) => /* FIXME (#2543) */ copy i,
class_method(it) => /* FIXME (#2543) */ copy it.ident
}
}
@ -294,8 +307,8 @@ type ivar = {ident: ident, ty: @ty, cm: class_mutability,
fn public_methods(ms: ~[@method]) -> ~[@method] {
vec::filter(ms,
|m| alt m.vis {
public { true }
_ { false }
public => true,
_ => false
})
}
@ -303,14 +316,14 @@ fn split_class_items(cs: ~[@class_member]) -> (~[ivar], ~[@method]) {
let mut vs = ~[], ms = ~[];
for cs.each |c| {
alt c.node {
instance_var(i, t, cm, id, vis) {
instance_var(i, t, cm, id, vis) => {
vec::push(vs, {ident: /* FIXME (#2543) */ copy i,
ty: t,
cm: cm,
id: id,
vis: vis});
}
class_method(m) { vec::push(ms, m); }
class_method(m) => vec::push(ms, m)
}
};
(vs, ms)
@ -320,8 +333,8 @@ fn split_class_items(cs: ~[@class_member]) -> (~[ivar], ~[@method]) {
// a default, pull out the useful fields to make a ty_method
fn trait_method_to_ty_method(method: trait_method) -> ty_method {
alt method {
required(m) { m }
provided(m) {
required(m) => m,
provided(m) => {
{ident: m.ident, attrs: m.attrs,
decl: m.decl, tps: m.tps, self_ty: m.self_ty,
id: m.id, span: m.span}
@ -334,8 +347,8 @@ fn split_trait_methods(trait_methods: ~[trait_method])
let mut reqd = ~[], provd = ~[];
for trait_methods.each |trt_method| {
alt trt_method {
required(tm) { vec::push(reqd, tm); }
provided(m) { vec::push(provd, m); }
required(tm) => vec::push(reqd, tm),
provided(m) => vec::push(provd, m)
}
};
(reqd, provd)
@ -343,8 +356,8 @@ fn split_trait_methods(trait_methods: ~[trait_method])
pure fn class_member_visibility(ci: @class_member) -> visibility {
alt ci.node {
instance_var(_, _, _, _, vis) { vis }
class_method(m) { m.vis }
instance_var(_, _, _, _, vis) => vis,
class_method(m) => m.vis
}
}
@ -357,33 +370,33 @@ trait inlined_item_utils {
impl inlined_item_methods of inlined_item_utils for inlined_item {
fn ident() -> ident {
alt self {
ii_item(i) { /* FIXME (#2543) */ copy i.ident }
ii_foreign(i) { /* FIXME (#2543) */ copy i.ident }
ii_method(_, m) { /* FIXME (#2543) */ copy m.ident }
ii_ctor(_, nm, _, _) { /* FIXME (#2543) */ copy nm }
ii_dtor(_, nm, _, _) { /* FIXME (#2543) */ copy nm }
ii_item(i) => /* FIXME (#2543) */ copy i.ident,
ii_foreign(i) => /* FIXME (#2543) */ copy i.ident,
ii_method(_, m) => /* FIXME (#2543) */ copy m.ident,
ii_ctor(_, nm, _, _) => /* FIXME (#2543) */ copy nm,
ii_dtor(_, nm, _, _) => /* FIXME (#2543) */ copy nm
}
}
fn id() -> ast::node_id {
alt self {
ii_item(i) { i.id }
ii_foreign(i) { i.id }
ii_method(_, m) { m.id }
ii_ctor(ctor, _, _, _) { ctor.node.id }
ii_dtor(dtor, _, _, _) { dtor.node.id }
ii_item(i) => i.id,
ii_foreign(i) => i.id,
ii_method(_, m) => m.id,
ii_ctor(ctor, _, _, _) => ctor.node.id,
ii_dtor(dtor, _, _, _) => dtor.node.id
}
}
fn accept<E>(e: E, v: visit::vt<E>) {
alt self {
ii_item(i) { v.visit_item(i, e, v) }
ii_foreign(i) { v.visit_foreign_item(i, e, v) }
ii_method(_, m) { visit::visit_method_helper(m, e, v) }
ii_ctor(ctor, nm, tps, parent_id) {
ii_item(i) => v.visit_item(i, e, v),
ii_foreign(i) => v.visit_foreign_item(i, e, v),
ii_method(_, m) => visit::visit_method_helper(m, e, v),
ii_ctor(ctor, nm, tps, parent_id) => {
visit::visit_class_ctor_helper(ctor, nm, tps, parent_id, e, v);
}
ii_dtor(dtor, nm, tps, parent_id) {
ii_dtor(dtor, nm, tps, parent_id) => {
visit::visit_class_dtor_helper(dtor, tps, parent_id, e, v);
}
}
@ -394,26 +407,26 @@ impl inlined_item_methods of inlined_item_utils for inlined_item {
referring to a def_self */
fn is_self(d: ast::def) -> bool {
alt d {
def_self(_) { true }
def_upvar(_, d, _) { is_self(*d) }
_ { false }
def_self(_) => true,
def_upvar(_, d, _) => is_self(*d),
_ => false
}
}
/// Maps a binary operator to its precedence
fn operator_prec(op: ast::binop) -> uint {
alt op {
mul | div | rem { 12u }
mul | div | rem => 12u,
// 'as' sits between here with 11
add | subtract { 10u }
shl | shr { 9u }
bitand { 8u }
bitxor { 7u }
bitor { 6u }
lt | le | ge | gt { 4u }
eq | ne { 3u }
and { 2u }
or { 1u }
add | subtract => 10u,
shl | shr => 9u,
bitand => 8u,
bitxor => 7u,
bitor => 6u,
lt | le | ge | gt => 4u,
eq | ne => 3u,
and => 2u,
or => 1u
}
}
@ -443,13 +456,13 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
visit_view_item: fn@(vi: @view_item) {
alt vi.node {
view_item_use(_, _, id) { vfn(id) }
view_item_import(vps) | view_item_export(vps) {
view_item_use(_, _, id) => vfn(id),
view_item_import(vps) | view_item_export(vps) => {
do vec::iter(vps) |vp| {
alt vp.node {
view_path_simple(_, _, id) { vfn(id) }
view_path_glob(_, id) { vfn(id) }
view_path_list(_, _, id) { vfn(id) }
view_path_simple(_, _, id) => vfn(id),
view_path_glob(_, id) => vfn(id),
view_path_list(_, _, id) => vfn(id)
}
}
}
@ -463,8 +476,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
visit_item: fn@(i: @item) {
vfn(i.id);
alt i.node {
item_enum(vs, _) { for vs.each |v| { vfn(v.node.id); } }
_ {}
item_enum(vs, _) => for vs.each |v| { vfn(v.node.id); }
_ => ()
}
},
@ -499,10 +512,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
visit_ty: fn@(t: @ty) {
alt t.node {
ty_path(_, id) {
vfn(id)
}
_ { /* fall through */ }
ty_path(_, id) => vfn(id),
_ => { /* fall through */ }
}
},
@ -515,27 +526,27 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
vfn(id);
alt fk {
visit::fk_ctor(nm, _, tps, self_id, parent_id) {
visit::fk_ctor(nm, _, tps, self_id, parent_id) => {
vec::iter(tps, |tp| vfn(tp.id));
vfn(id);
vfn(self_id);
vfn(parent_id.node);
}
visit::fk_dtor(tps, _, self_id, parent_id) {
visit::fk_dtor(tps, _, self_id, parent_id) => {
vec::iter(tps, |tp| vfn(tp.id));
vfn(id);
vfn(self_id);
vfn(parent_id.node);
}
visit::fk_item_fn(_, tps) {
visit::fk_item_fn(_, tps) => {
vec::iter(tps, |tp| vfn(tp.id));
}
visit::fk_method(_, tps, m) {
visit::fk_method(_, tps, m) => {
vfn(m.self_id);
vec::iter(tps, |tp| vfn(tp.id));
}
visit::fk_anon(_, capture_clause)
| visit::fk_fn_block(capture_clause) {
| visit::fk_fn_block(capture_clause) => {
for vec::each(*capture_clause) |clause| {
vfn(clause.id);
}
@ -555,11 +566,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
visit_class_item: fn@(c: @class_member) {
alt c.node {
instance_var(_, _, _, id,_) {
vfn(id)
}
class_method(_) {
}
instance_var(_, _, _, id,_) => vfn(id),
class_method(_) => ()
}
}
})
@ -585,31 +593,29 @@ fn compute_id_range_for_inlined_item(item: inlined_item) -> id_range {
pure fn is_item_impl(item: @ast::item) -> bool {
alt item.node {
item_impl(*) { true }
_ { false }
item_impl(*) => true,
_ => false
}
}
fn walk_pat(pat: @pat, it: fn(@pat)) {
it(pat);
alt pat.node {
pat_ident(_, pth, some(p)) { walk_pat(p, it); }
pat_rec(fields, _) {
for fields.each |f| { walk_pat(f.pat, it); }
pat_ident(_, pth, some(p)) => walk_pat(p, it),
pat_rec(fields, _) => for fields.each |f| { walk_pat(f.pat, it) }
pat_enum(_, some(s)) | pat_tup(s) => for s.each |p| {
walk_pat(p, it)
}
pat_enum(_, some(s)) | pat_tup(s) {
for s.each |p| { walk_pat(p, it); }
}
pat_box(s) | pat_uniq(s) { walk_pat(s, it); }
pat_box(s) | pat_uniq(s) => walk_pat(s, it),
pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _, _)
| pat_enum(_, _) {}
| pat_enum(_, _) => ()
}
}
fn view_path_id(p: @view_path) -> node_id {
alt p.node {
view_path_simple(_, _, id) | view_path_glob(_, id) |
view_path_list(_, _, id) { id }
view_path_list(_, _, id) => id
}
}

View file

@ -115,9 +115,9 @@ fn get_attr_name(attr: ast::attribute) -> ast::ident {
// All "bad" FIXME copies are as per #2543
fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
alt meta.node {
ast::meta_word(n) { /* FIXME (#2543) */ copy n }
ast::meta_name_value(n, _) { /* FIXME (#2543) */ copy n }
ast::meta_list(n, _) { /* FIXME (#2543) */ copy n }
ast::meta_word(n) => /* FIXME (#2543) */ copy n,
ast::meta_name_value(n, _) => /* FIXME (#2543) */ copy n,
ast::meta_list(n, _) => /* FIXME (#2543) */ copy n
}
}
@ -127,25 +127,19 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
*/
fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@~str> {
alt meta.node {
ast::meta_name_value(_, v) {
alt v.node {
ast::lit_str(s) {
option::some(s)
}
_ {
option::none
}
}
ast::meta_name_value(_, v) => alt v.node {
ast::lit_str(s) => option::some(s),
_ => option::none
}
_ { option::none }
_ => option::none
}
}
/// Gets a list of inner meta items from a list meta_item type
fn get_meta_item_list(meta: @ast::meta_item) -> option<~[@ast::meta_item]> {
alt meta.node {
ast::meta_list(_, l) { option::some(/* FIXME (#2543) */ copy l) }
_ { option::none }
ast::meta_list(_, l) => option::some(/* FIXME (#2543) */ copy l),
_ => option::none
}
}
@ -157,11 +151,11 @@ fn get_name_value_str_pair(
item: @ast::meta_item
) -> option<(ast::ident, @~str)> {
alt attr::get_meta_item_value_str(item) {
some(value) {
some(value) => {
let name = attr::get_meta_item_name(item);
some((name, value))
}
none { none }
none => none
}
}
@ -210,16 +204,15 @@ fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool {
fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
return alt a.node {
ast::meta_word(na) {
alt b.node { ast::meta_word(nb) { na == nb } _ { false } }
ast::meta_word(na) => alt b.node {
ast::meta_word(nb) => na == nb,
_ => false
}
ast::meta_name_value(na, va) {
alt b.node {
ast::meta_name_value(nb, vb) { na == nb && va.node == vb.node }
_ { false }
}
ast::meta_name_value(na, va) => alt b.node {
ast::meta_name_value(nb, vb) => na == nb && va.node == vb.node,
_ => false
}
ast::meta_list(na, la) {
ast::meta_list(na, la) => {
// ~[Fixme-sorting]
// FIXME (#607): Needs implementing
@ -261,13 +254,11 @@ fn last_meta_item_value_str_by_name(
+name: ~str
) -> option<@~str> {
alt last_meta_item_by_name(items, name) {
some(item) {
alt attr::get_meta_item_value_str(item) {
some(value) { some(value) }
none { none }
}
some(item) => alt attr::get_meta_item_value_str(item) {
some(value) => some(value),
none => none
}
none { none }
none => none
}
}
@ -276,10 +267,8 @@ fn last_meta_item_list_by_name(
+name: ~str
) -> option<~[@ast::meta_item]> {
alt last_meta_item_by_name(items, name) {
some(item) {
attr::get_meta_item_list(item)
}
none { none }
some(item) => attr::get_meta_item_list(item),
none => none
}
}
@ -292,9 +281,9 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool {
pure fn key(m: &ast::meta_item) -> ast::ident {
alt m.node {
ast::meta_word(name) { /* FIXME (#2543) */ copy name }
ast::meta_name_value(name, _) { /* FIXME (#2543) */ copy name }
ast::meta_list(name, _) { /* FIXME (#2543) */ copy name }
ast::meta_word(name) => /* FIXME (#2543) */ copy name,
ast::meta_name_value(name, _) => /* FIXME (#2543) */ copy name,
ast::meta_list(name, _) => /* FIXME (#2543) */ copy name
}
}
key(*ma) <= key(*mb)
@ -322,8 +311,8 @@ fn find_linkage_attrs(attrs: ~[ast::attribute]) -> ~[ast::attribute] {
let mut found = ~[];
for find_attrs_by_name(attrs, ~"link").each |attr| {
alt attr.node.value.node {
ast::meta_list(_, _) { vec::push(found, attr) }
_ { debug!{"ignoring link attribute that has incorrect type"}; }
ast::meta_list(_, _) => vec::push(found, attr),
_ => debug!{"ignoring link attribute that has incorrect type"}
}
}
return found;
@ -336,26 +325,26 @@ fn find_linkage_attrs(attrs: ~[ast::attribute]) -> ~[ast::attribute] {
fn find_linkage_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
do find_linkage_attrs(attrs).flat_map |attr| {
alt check attr.node.value.node {
ast::meta_list(_, items) { /* FIXME (#2543) */ copy items }
ast::meta_list(_, items) => /* FIXME (#2543) */ copy items
}
}
}
fn foreign_abi(attrs: ~[ast::attribute]) -> either<~str, ast::foreign_abi> {
return alt attr::first_attr_value_str_by_name(attrs, ~"abi") {
option::none {
option::none => {
either::right(ast::foreign_abi_cdecl)
}
option::some(@~"rust-intrinsic") {
option::some(@~"rust-intrinsic") => {
either::right(ast::foreign_abi_rust_intrinsic)
}
option::some(@~"cdecl") {
option::some(@~"cdecl") => {
either::right(ast::foreign_abi_cdecl)
}
option::some(@~"stdcall") {
option::some(@~"stdcall") => {
either::right(ast::foreign_abi_stdcall)
}
option::some(t) {
option::some(t) => {
either::left(~"unsupported abi: " + *t)
}
};
@ -373,8 +362,8 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
// FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
do vec::foldl(ia_none, attrs) |ia,attr| {
alt attr.node.value.node {
ast::meta_word(@~"inline") { ia_hint }
ast::meta_list(@~"inline", items) {
ast::meta_word(@~"inline") => ia_hint,
ast::meta_list(@~"inline", items) => {
if !vec::is_empty(find_meta_items_by_name(items, ~"always")) {
ia_always
} else if !vec::is_empty(
@ -384,7 +373,7 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
ia_hint
}
}
_ { ia }
_ => ia
}
}
}

View file

@ -125,16 +125,16 @@ fn lookup_char_pos_adj(map: codemap, pos: uint)
{
let loc = lookup_char_pos(map, pos);
alt (loc.file.substr) {
fss_none {
fss_none => {
{filename: /* FIXME (#2543) */ copy loc.file.name,
line: loc.line,
col: loc.col,
file: some(loc.file)}
}
fss_internal(sp) {
fss_internal(sp) => {
lookup_char_pos_adj(map, sp.lo + (pos - loc.file.start_pos.ch))
}
fss_external(eloc) {
fss_external(eloc) => {
{filename: /* FIXME (#2543) */ copy eloc.filename,
line: eloc.line + loc.line - 1u,
col: if loc.line == 1u {eloc.col + loc.col} else {loc.col},
@ -147,12 +147,12 @@ fn adjust_span(map: codemap, sp: span) -> span {
pure fn lookup(pos: file_pos) -> uint { return pos.ch; }
let line = lookup_line(map, sp.lo, lookup);
alt (line.fm.substr) {
fss_none {sp}
fss_internal(s) {
fss_none => sp,
fss_internal(s) => {
adjust_span(map, {lo: s.lo + (sp.lo - line.fm.start_pos.ch),
hi: s.lo + (sp.hi - line.fm.start_pos.ch),
expn_info: sp.expn_info})}
fss_external(_) {sp}
fss_external(_) => sp
}
}
@ -197,8 +197,8 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
fn get_line(fm: filemap, line: int) -> ~str unsafe {
let begin: uint = fm.lines[line].byte - fm.start_pos.byte;
let end = alt str::find_char_from(*fm.src, '\n', begin) {
some(e) { e }
none { str::len(*fm.src) }
some(e) => e,
none => str::len(*fm.src)
};
str::slice(*fm.src, begin, end)
}

View file

@ -88,10 +88,12 @@ impl codemap_handler of handler for handler_t {
fn abort_if_errors() {
let s;
alt self.err_count {
0u { return; }
1u { s = ~"aborting due to previous error"; }
_ { s = fmt!{"aborting due to %u previous errors",
self.err_count}; }
0u => return,
1u => s = ~"aborting due to previous error",
_ => {
s = fmt!{"aborting due to %u previous errors",
self.err_count};
}
}
self.fatal(s);
}
@ -121,8 +123,8 @@ fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler {
fn mk_handler(emitter: option<emitter>) -> handler {
let emit = alt emitter {
some(e) { e }
none {
some(e) => e,
none => {
let f = fn@(cmsp: option<(codemap::codemap, span)>,
msg: ~str, t: level) {
emit(cmsp, msg, t);
@ -146,19 +148,19 @@ enum level {
fn diagnosticstr(lvl: level) -> ~str {
alt lvl {
fatal { ~"error" }
error { ~"error" }
warning { ~"warning" }
note { ~"note" }
fatal => ~"error",
error => ~"error",
warning => ~"warning",
note => ~"note"
}
}
fn diagnosticcolor(lvl: level) -> u8 {
alt lvl {
fatal { term::color_bright_red }
error { term::color_bright_red }
warning { term::color_bright_yellow }
note { term::color_bright_green }
fatal => term::color_bright_red,
error => term::color_bright_red,
warning => term::color_bright_yellow,
note => term::color_bright_green
}
}
@ -181,7 +183,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: ~str) {
fn emit(cmsp: option<(codemap::codemap, span)>,
msg: ~str, lvl: level) {
alt cmsp {
some((cm, sp)) {
some((cm, sp)) => {
let sp = codemap::adjust_span(cm,sp);
let ss = codemap::span_to_str(sp, cm);
let lines = codemap::span_to_lines(sp, cm);
@ -189,7 +191,7 @@ fn emit(cmsp: option<(codemap::codemap, span)>,
highlight_lines(cm, sp, lines);
print_macro_backtrace(cm, sp);
}
none {
none => {
print_diagnostic(~"", lvl, msg);
}
}
@ -265,7 +267,7 @@ fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
fn expect<T: copy>(diag: span_handler,
opt: option<T>, msg: fn() -> ~str) -> T {
alt opt {
some(t) { t }
none { diag.handler().bug(msg()); }
some(t) => t,
none => diag.handler().bug(msg())
}
}

View file

@ -102,18 +102,18 @@ fn expand(cx: ext_ctxt,
do vec::flat_map(in_items) |in_item| {
alt in_item.node {
ast::item_ty(ty, tps) {
ast::item_ty(ty, tps) => {
vec::append(~[filter_attrs(in_item)],
ty_fns(cx, in_item.ident, ty, tps))
}
ast::item_enum(variants, tps) {
ast::item_enum(variants, tps) => {
vec::append(~[filter_attrs(in_item)],
enum_fns(cx, in_item.ident,
in_item.span, variants, tps))
}
_ {
_ => {
cx.span_err(span, ~"#[auto_serialize] can only be \
applied to type and enum \
definitions");
@ -376,12 +376,12 @@ fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
fn is_vec_or_str(ty: @ast::ty) -> bool {
alt ty.node {
ast::ty_vec(_) { true }
ast::ty_vec(_) => true,
// This may be wrong if the user has shadowed (!) str
ast::ty_path(@{span: _, global: _, idents: ids,
rp: none, types: _}, _)
if ids == ~[@~"str"] { true }
_ { false }
if ids == ~[@~"str"] => true,
_ => false
}
}
@ -392,37 +392,37 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
let ext_cx = cx; // required for #ast{}
alt ty.node {
ast::ty_nil {
ast::ty_nil => {
~[#ast[stmt]{$(s).emit_nil()}]
}
ast::ty_bot {
ast::ty_bot => {
cx.span_err(
ty.span, fmt!{"Cannot serialize bottom type"});
~[]
}
ast::ty_box(mt) {
ast::ty_box(mt) => {
let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
~[#ast[stmt]{$(s).emit_box($(l));}]
}
// For unique evecs/estrs, just pass through to underlying vec or str
ast::ty_uniq(mt) if is_vec_or_str(mt.ty) {
ast::ty_uniq(mt) if is_vec_or_str(mt.ty) => {
ser_ty(cx, tps, mt.ty, s, v)
}
ast::ty_uniq(mt) {
ast::ty_uniq(mt) => {
let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
~[#ast[stmt]{$(s).emit_uniq($(l));}]
}
ast::ty_ptr(_) | ast::ty_rptr(_, _) {
ast::ty_ptr(_) | ast::ty_rptr(_, _) => {
cx.span_err(ty.span, ~"cannot serialize pointer types");
~[]
}
ast::ty_rec(flds) {
ast::ty_rec(flds) => {
let fld_stmts = do vec::from_fn(vec::len(flds)) |fidx| {
let fld = flds[fidx];
let vf = cx.expr(fld.span,
@ -439,12 +439,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
~[#ast[stmt]{$(s).emit_rec($(fld_lambda));}]
}
ast::ty_fn(_, _) {
ast::ty_fn(_, _) => {
cx.span_err(ty.span, ~"cannot serialize function types");
~[]
}
ast::ty_tup(tys) {
ast::ty_tup(tys) => {
// Generate code like
//
// alt v {
@ -478,31 +478,31 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
~[cx.alt_stmt(arms, ty.span, v)]
}
ast::ty_path(path, _) {
ast::ty_path(path, _) => {
if vec::len(path.idents) == 1u &&
vec::is_empty(path.types) {
let ident = path.idents[0];
alt tps.find(*ident) {
some(f) { f(v) }
none { ser_path(cx, tps, path, s, v) }
some(f) => f(v),
none => ser_path(cx, tps, path, s, v)
}
} else {
ser_path(cx, tps, path, s, v)
}
}
ast::ty_mac(_) {
ast::ty_mac(_) => {
cx.span_err(ty.span, ~"cannot serialize macro types");
~[]
}
ast::ty_infer {
ast::ty_infer => {
cx.span_err(ty.span, ~"cannot serialize inferred types");
~[]
}
ast::ty_vec(mt) {
ast::ty_vec(mt) => {
let ser_e =
cx.expr(
ty.span,
@ -519,7 +519,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
}]
}
ast::ty_fixed_length(_, _) {
ast::ty_fixed_length(_, _) => {
cx.span_unimpl(ty.span, ~"serialization for fixed length types");
}
}
@ -635,34 +635,34 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
let ext_cx = cx; // required for #ast{}
alt ty.node {
ast::ty_nil {
ast::ty_nil => {
#ast{ $(d).read_nil() }
}
ast::ty_bot {
ast::ty_bot => {
#ast{ fail }
}
ast::ty_box(mt) {
ast::ty_box(mt) => {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ @$(d).read_box($(l)) }
}
// For unique evecs/estrs, just pass through to underlying vec or str
ast::ty_uniq(mt) if is_vec_or_str(mt.ty) {
ast::ty_uniq(mt) if is_vec_or_str(mt.ty) => {
deser_ty(cx, tps, mt.ty, d)
}
ast::ty_uniq(mt) {
ast::ty_uniq(mt) => {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ ~$(d).read_uniq($(l)) }
}
ast::ty_ptr(_) | ast::ty_rptr(_, _) {
ast::ty_ptr(_) | ast::ty_rptr(_, _) => {
#ast{ fail }
}
ast::ty_rec(flds) {
ast::ty_rec(flds) => {
let fields = do vec::from_fn(vec::len(flds)) |fidx| {
let fld = flds[fidx];
let d = cx.clone(d);
@ -679,11 +679,11 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
#ast{ $(d).read_rec($(fld_lambda)) }
}
ast::ty_fn(_, _) {
ast::ty_fn(_, _) => {
#ast{ fail }
}
ast::ty_tup(tys) {
ast::ty_tup(tys) => {
// Generate code like
//
// d.read_tup(3u) {||
@ -704,34 +704,34 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
#ast{ $(d).read_tup($(sz), $(body)) }
}
ast::ty_path(path, _) {
ast::ty_path(path, _) => {
if vec::len(path.idents) == 1u &&
vec::is_empty(path.types) {
let ident = path.idents[0];
alt tps.find(*ident) {
some(f) { f() }
none { deser_path(cx, tps, path, d) }
some(f) => f(),
none => deser_path(cx, tps, path, d)
}
} else {
deser_path(cx, tps, path, d)
}
}
ast::ty_mac(_) {
ast::ty_mac(_) => {
#ast{ fail }
}
ast::ty_infer {
ast::ty_infer => {
#ast{ fail }
}
ast::ty_vec(mt) {
ast::ty_vec(mt) => {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ std::serialization::read_to_vec($(d), $(l)) }
}
ast::ty_fixed_length(_, _) {
ast::ty_fixed_length(_, _) => {
cx.span_unimpl(ty.span, ~"deserialization for fixed length types");
}
}

View file

@ -151,7 +151,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
fn mod_path() -> ~[ast::ident] { return self.mod_path; }
fn bt_push(ei: codemap::expn_info_) {
alt ei {
expanded_from({call_site: cs, callie: callie}) {
expanded_from({call_site: cs, callie: callie}) => {
self.backtrace =
some(@expanded_from({
call_site: {lo: cs.lo, hi: cs.hi,
@ -162,10 +162,10 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
}
fn bt_pop() {
alt self.backtrace {
some(@expanded_from({call_site: {expn_info: prev, _}, _})) {
some(@expanded_from({call_site: {expn_info: prev, _}, _})) => {
self.backtrace = prev
}
_ { self.bug(~"tried to pop without a push"); }
_ => self.bug(~"tried to pop without a push")
}
}
fn span_fatal(sp: span, msg: ~str) -> ! {
@ -207,24 +207,22 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ~str {
alt expr.node {
ast::expr_lit(l) {
alt l.node {
ast::lit_str(s) { return *s; }
_ { cx.span_fatal(l.span, error); }
}
ast::expr_lit(l) => alt l.node {
ast::lit_str(s) => return *s,
_ => cx.span_fatal(l.span, error)
}
_ { cx.span_fatal(expr.span, error); }
_ => cx.span_fatal(expr.span, error)
}
}
fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ast::ident {
alt expr.node {
ast::expr_path(p) {
ast::expr_path(p) => {
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
cx.span_fatal(expr.span, error);
} else { return p.idents[0]; }
}
_ { cx.span_fatal(expr.span, error); }
_ => cx.span_fatal(expr.span, error)
}
}
@ -236,29 +234,27 @@ fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
min: uint, max: option<uint>, name: ~str) -> ~[@ast::expr] {
alt arg {
some(expr) {
alt expr.node {
ast::expr_vec(elts, _) {
some(expr) => alt expr.node {
ast::expr_vec(elts, _) => {
let elts_len = vec::len(elts);
alt max {
some(max) if ! (min <= elts_len && elts_len <= max) {
cx.span_fatal(sp,
fmt!{"#%s takes between %u and %u arguments.",
name, min, max});
alt max {
some(max) if ! (min <= elts_len && elts_len <= max) => {
cx.span_fatal(sp,
fmt!{"#%s takes between %u and %u arguments.",
name, min, max});
}
none if ! (min <= elts_len) => {
cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.",
name, min});
}
_ => return elts /* we're good */
}
none if ! (min <= elts_len) {
cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.",
name, min});
}
_ { return elts; /* we're good */}
}
}
_ {
_ => {
cx.span_fatal(sp, fmt!{"#%s: malformed invocation", name})
}
}
}
none {cx.span_fatal(sp, fmt!{"#%s: missing arguments", name})}
none => cx.span_fatal(sp, fmt!{"#%s: missing arguments", name})
}
}
@ -266,8 +262,8 @@ fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
-> ast::mac_body_
{
alt (args) {
some(body) {body}
none {cx.span_fatal(sp, ~"missing macro body")}
some(body) => body,
none => cx.span_fatal(sp, ~"missing macro body")
}
}
@ -295,17 +291,15 @@ fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree])
let args =
alt parse_or_else(cx.parse_sess(), cx.cfg(), arg_reader as reader,
argument_gram).get(@~"arg") {
@matched_seq(s, _) {
do s.map() |lf| {
alt lf {
@matched_nonterminal(parse::token::nt_expr(arg)) {
arg /* whew! list of exprs, here we come! */
}
_ { fail ~"badly-structured parse result"; }
}
@matched_seq(s, _) => do s.map() |lf| {
alt lf {
@matched_nonterminal(parse::token::nt_expr(arg)) => {
arg /* whew! list of exprs, here we come! */
}
_ => fail ~"badly-structured parse result"
}
}
_ { fail ~"badly-structured parse result"; }
_ => fail ~"badly-structured parse result"
};
return some(@{id: parse::next_node_id(cx.parse_sess()),

View file

@ -17,8 +17,8 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
let var = expr_to_str(cx, args[0], ~"#env requires a string");
alt os::getenv(var) {
option::none { return mk_uniq_str(cx, sp, ~""); }
option::some(s) { return mk_uniq_str(cx, sp, s); }
option::none => return mk_uniq_str(cx, sp, ~""),
option::some(s) => return mk_uniq_str(cx, sp, s)
}
}

View file

@ -18,25 +18,25 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
return alt e {
// expr_mac should really be expr_ext or something; it's the
// entry-point for all syntax extensions.
expr_mac(mac) {
expr_mac(mac) => {
// Old-style macros, for compatibility, will erase this whole
// block once we've transitioned.
alt mac.node {
mac_invoc(pth, args, body) {
mac_invoc(pth, args, body) => {
assert (vec::len(pth.idents) > 0u);
let extname = pth.idents[0];
alt exts.find(*extname) {
none {
none => {
cx.span_fatal(pth.span,
fmt!{"macro undefined: '%s'", *extname})
}
some(item_decorator(_)) {
some(item_decorator(_)) => {
cx.span_fatal(
pth.span,
fmt!{"%s can only be used as a decorator", *extname});
}
some(normal({expander: exp, span: exp_sp})) {
some(normal({expander: exp, span: exp_sp})) => {
let expanded = exp(cx, mac.span, args, body);
cx.bt_push(expanded_from({call_site: s,
@ -47,17 +47,17 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
(fully_expanded, s)
}
some(macro_defining(ext)) {
some(macro_defining(ext)) => {
let named_extension = ext(cx, mac.span, args, body);
exts.insert(*named_extension.ident, named_extension.ext);
(ast::expr_rec(~[], none), s)
}
some(expr_tt(_)) {
some(expr_tt(_)) => {
cx.span_fatal(pth.span,
fmt!{"this tt-style macro should be \
invoked '%s!{...}'", *extname})
}
some(item_tt(*)) {
some(item_tt(*)) => {
cx.span_fatal(pth.span,
~"cannot use item macros in this context");
}
@ -66,20 +66,20 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
// Token-tree macros, these will be the only case when we're
// finished transitioning.
mac_invoc_tt(pth, tts) {
mac_invoc_tt(pth, tts) => {
assert (vec::len(pth.idents) == 1u);
let extname = pth.idents[0];
alt exts.find(*extname) {
none {
none => {
cx.span_fatal(pth.span,
fmt!{"macro undefined: '%s'", *extname})
}
some(expr_tt({expander: exp, span: exp_sp})) {
some(expr_tt({expander: exp, span: exp_sp})) => {
let expanded = alt exp(cx, mac.span, tts) {
mr_expr(e) { e }
_ { cx.span_fatal(
mr_expr(e) => e,
_ => cx.span_fatal(
pth.span, fmt!{"non-expr macro in expr pos: %s",
*extname}) }
*extname})
};
cx.bt_push(expanded_from({call_site: s,
@ -90,7 +90,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
(fully_expanded, s)
}
some(normal({expander: exp, span: exp_sp})) {
some(normal({expander: exp, span: exp_sp})) => {
//convert the new-style invoc for the old-style macro
let arg = base::tt_args_to_original_flavor(cx, pth.span,
tts);
@ -104,7 +104,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
(fully_expanded, s)
}
_ {
_ => {
cx.span_fatal(pth.span,
fmt!{"'%s' is not a tt-style macro",
*extname})
@ -112,10 +112,10 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
}
}
_ { cx.span_bug(mac.span, ~"naked syntactic bit") }
_ => cx.span_bug(mac.span, ~"naked syntactic bit")
}
}
_ { orig(e, s, fld) }
_ => orig(e, s, fld)
};
}
@ -142,17 +142,14 @@ fn expand_mod_items(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
let new_items = do vec::flat_map(module_.items) |item| {
do vec::foldr(item.attrs, ~[item]) |attr, items| {
let mname = alt attr.node.value.node {
ast::meta_word(n) { n }
ast::meta_name_value(n, _) { n }
ast::meta_list(n, _) { n }
ast::meta_word(n) => n,
ast::meta_name_value(n, _) => n,
ast::meta_list(n, _) => n
};
alt exts.find(*mname) {
none | some(normal(_)) | some(macro_defining(_))
| some(expr_tt(_)) | some(item_tt(*)) {
items
}
some(item_decorator(dec_fn)) {
| some(expr_tt(_)) | some(item_tt(*)) => items,
some(item_decorator(dec_fn)) => {
dec_fn(cx, attr.span, attr.node.value, items)
}
}
@ -170,24 +167,22 @@ fn expand_item(exts: hashmap<~str, syntax_extension>,
-> option<@ast::item>
{
let is_mod = alt it.node {
ast::item_mod(_) | ast::item_foreign_mod(_) {true}
_ {false}
ast::item_mod(_) | ast::item_foreign_mod(_) => true,
_ => false
};
let maybe_it = alt it.node {
ast::item_mac(*) {
expand_item_mac(exts, cx, it, fld)
}
_ { some(it) }
ast::item_mac(*) => expand_item_mac(exts, cx, it, fld),
_ => some(it)
};
alt maybe_it {
some(it) {
some(it) => {
if is_mod { cx.mod_push(it.ident); }
let ret_val = orig(it, fld);
if is_mod { cx.mod_pop(); }
return ret_val;
}
none { return none; }
none => return none
}
}
@ -198,24 +193,24 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
cx: ext_ctxt, &&it: @ast::item,
fld: ast_fold) -> option<@ast::item> {
alt it.node {
item_mac({node: mac_invoc_tt(pth, tts), span}) {
item_mac({node: mac_invoc_tt(pth, tts), span}) => {
let extname = pth.idents[0];
alt exts.find(*extname) {
none {
none => {
cx.span_fatal(pth.span,
fmt!{"macro undefined: '%s'", *extname})
}
some(item_tt(expand)) {
some(item_tt(expand)) => {
let expanded = expand.expander(cx, it.span, it.ident, tts);
cx.bt_push(expanded_from({call_site: it.span,
callie: {name: *extname,
span: expand.span}}));
let maybe_it = alt expanded {
mr_item(it) { fld.fold_item(it) }
mr_expr(e) { cx.span_fatal(pth.span,
mr_item(it) => fld.fold_item(it),
mr_expr(e) => cx.span_fatal(pth.span,
~"expr macro in item position: " +
*extname) }
mr_def(mdef) {
*extname),
mr_def(mdef) => {
exts.insert(*mdef.ident, mdef.ext);
none
}
@ -223,13 +218,11 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
cx.bt_pop();
return maybe_it
}
_ { cx.span_fatal(it.span,
fmt!{"%s is not a legal here", *extname}) }
_ => cx.span_fatal(it.span,
fmt!{"%s is not a legal here", *extname})
}
}
_ {
cx.span_bug(it.span, ~"invalid item macro invocation");
}
_ => cx.span_bug(it.span, ~"invalid item macro invocation")
}
}

View file

@ -53,11 +53,11 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
let mut tmp_expr = make_rt_path_expr(cx, sp, @~"flag_none");
for flags.each |f| {
let fstr = alt f {
flag_left_justify { ~"flag_left_justify" }
flag_left_zero_pad { ~"flag_left_zero_pad" }
flag_space_for_sign { ~"flag_space_for_sign" }
flag_sign_always { ~"flag_sign_always" }
flag_alternate { ~"flag_alternate" }
flag_left_justify => ~"flag_left_justify",
flag_left_zero_pad => ~"flag_left_zero_pad",
flag_space_for_sign => ~"flag_space_for_sign",
flag_sign_always => ~"flag_sign_always",
flag_alternate => ~"flag_alternate"
};
tmp_expr = mk_binary(cx, sp, ast::bitor, tmp_expr,
make_rt_path_expr(cx, sp, @fstr));
@ -66,30 +66,28 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
}
fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
alt cnt {
count_implied {
count_implied => {
return make_rt_path_expr(cx, sp, @~"count_implied");
}
count_is(c) {
count_is(c) => {
let count_lit = mk_int(cx, sp, c);
let count_is_path = make_path_vec(cx, @~"count_is");
let count_is_args = ~[count_lit];
return mk_call(cx, sp, count_is_path, count_is_args);
}
_ { cx.span_unimpl(sp, ~"unimplemented #fmt conversion"); }
_ => cx.span_unimpl(sp, ~"unimplemented #fmt conversion")
}
}
fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
let mut rt_type;
alt t {
ty_hex(c) {
alt c {
case_upper { rt_type = ~"ty_hex_upper"; }
case_lower { rt_type = ~"ty_hex_lower"; }
}
ty_hex(c) => alt c {
case_upper => rt_type = ~"ty_hex_upper",
case_lower => rt_type = ~"ty_hex_lower"
}
ty_bits { rt_type = ~"ty_bits"; }
ty_octal { rt_type = ~"ty_octal"; }
_ { rt_type = ~"ty_default"; }
ty_bits => rt_type = ~"ty_bits",
ty_octal => rt_type = ~"ty_octal",
_ => rt_type = ~"ty_default"
}
return make_rt_path_expr(cx, sp, @rt_type);
}
@ -124,128 +122,117 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
fn is_signed_type(cnv: conv) -> bool {
alt cnv.ty {
ty_int(s) {
alt s { signed { return true; } unsigned { return false; } }
ty_int(s) => alt s {
signed => return true,
unsigned => return false
}
ty_float { return true; }
_ { return false; }
ty_float => return true,
_ => return false
}
}
let unsupported = ~"conversion not supported in #fmt string";
alt cnv.param {
option::none { }
_ { cx.span_unimpl(sp, unsupported); }
option::none => (),
_ => cx.span_unimpl(sp, unsupported)
}
for cnv.flags.each |f| {
alt f {
flag_left_justify { }
flag_sign_always {
flag_left_justify => (),
flag_sign_always => {
if !is_signed_type(cnv) {
cx.span_fatal(sp,
~"+ flag only valid in " +
~"signed #fmt conversion");
}
}
flag_space_for_sign {
flag_space_for_sign => {
if !is_signed_type(cnv) {
cx.span_fatal(sp,
~"space flag only valid in " +
~"signed #fmt conversions");
}
}
flag_left_zero_pad { }
_ { cx.span_unimpl(sp, unsupported); }
flag_left_zero_pad => (),
_ => cx.span_unimpl(sp, unsupported)
}
}
alt cnv.width {
count_implied { }
count_is(_) { }
_ { cx.span_unimpl(sp, unsupported); }
count_implied => (),
count_is(_) => (),
_ => cx.span_unimpl(sp, unsupported)
}
alt cnv.precision {
count_implied { }
count_is(_) { }
_ { cx.span_unimpl(sp, unsupported); }
count_implied => (),
count_is(_) => (),
_ => cx.span_unimpl(sp, unsupported)
}
alt cnv.ty {
ty_str { return make_conv_call(cx, arg.span, ~"str", cnv, arg); }
ty_int(sign) {
alt sign {
signed {
return make_conv_call(cx, arg.span, ~"int", cnv, arg);
}
unsigned {
return make_conv_call(cx, arg.span, ~"uint", cnv, arg);
}
ty_str => return make_conv_call(cx, arg.span, ~"str", cnv, arg),
ty_int(sign) => alt sign {
signed => return make_conv_call(cx, arg.span, ~"int", cnv, arg),
unsigned => {
return make_conv_call(cx, arg.span, ~"uint", cnv, arg)
}
}
ty_bool { return make_conv_call(cx, arg.span, ~"bool", cnv, arg); }
ty_char { return make_conv_call(cx, arg.span, ~"char", cnv, arg); }
ty_hex(_) {
ty_bool => return make_conv_call(cx, arg.span, ~"bool", cnv, arg),
ty_char => return make_conv_call(cx, arg.span, ~"char", cnv, arg),
ty_hex(_) => {
return make_conv_call(cx, arg.span, ~"uint", cnv, arg);
}
ty_bits { return make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
ty_octal { return make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
ty_float {
ty_bits => return make_conv_call(cx, arg.span, ~"uint", cnv, arg),
ty_octal => return make_conv_call(cx, arg.span, ~"uint", cnv, arg),
ty_float => {
return make_conv_call(cx, arg.span, ~"float", cnv, arg);
}
ty_poly { return make_conv_call(cx, arg.span, ~"poly", cnv, arg); }
ty_poly => return make_conv_call(cx, arg.span, ~"poly", cnv, arg)
}
}
fn log_conv(c: conv) {
alt c.param {
some(p) { log(debug, ~"param: " + int::to_str(p, 10u)); }
_ { debug!{"param: none"}; }
some(p) => { log(debug, ~"param: " + int::to_str(p, 10u)); }
_ => debug!{"param: none"}
}
for c.flags.each |f| {
alt f {
flag_left_justify { debug!{"flag: left justify"}; }
flag_left_zero_pad { debug!{"flag: left zero pad"}; }
flag_space_for_sign { debug!{"flag: left space pad"}; }
flag_sign_always { debug!{"flag: sign always"}; }
flag_alternate { debug!{"flag: alternate"}; }
flag_left_justify => debug!{"flag: left justify"},
flag_left_zero_pad => debug!{"flag: left zero pad"},
flag_space_for_sign => debug!{"flag: left space pad"},
flag_sign_always => debug!{"flag: sign always"},
flag_alternate => debug!{"flag: alternate"}
}
}
alt c.width {
count_is(i) { log(debug,
~"width: count is " + int::to_str(i, 10u)); }
count_is_param(i) {
log(debug,
~"width: count is param " + int::to_str(i, 10u));
}
count_is_next_param { debug!{"width: count is next param"}; }
count_implied { debug!{"width: count is implied"}; }
count_is(i) => log(
debug, ~"width: count is " + int::to_str(i, 10u)),
count_is_param(i) => log(
debug, ~"width: count is param " + int::to_str(i, 10u)),
count_is_next_param => debug!{"width: count is next param"},
count_implied => debug!{"width: count is implied"}
}
alt c.precision {
count_is(i) { log(debug,
~"prec: count is " + int::to_str(i, 10u)); }
count_is_param(i) {
log(debug,
~"prec: count is param " + int::to_str(i, 10u));
}
count_is_next_param { debug!{"prec: count is next param"}; }
count_implied { debug!{"prec: count is implied"}; }
count_is(i) => log(
debug, ~"prec: count is " + int::to_str(i, 10u)),
count_is_param(i) => log(
debug, ~"prec: count is param " + int::to_str(i, 10u)),
count_is_next_param => debug!{"prec: count is next param"},
count_implied => debug!{"prec: count is implied"}
}
alt c.ty {
ty_bool { debug!{"type: bool"}; }
ty_str { debug!{"type: str"}; }
ty_char { debug!{"type: char"}; }
ty_int(s) {
alt s {
signed { debug!{"type: signed"}; }
unsigned { debug!{"type: unsigned"}; }
}
ty_bool => debug!{"type: bool"},
ty_str => debug!{"type: str"},
ty_char => debug!{"type: char"},
ty_int(s) => alt s {
signed => debug!{"type: signed"},
unsigned => debug!{"type: unsigned"}
}
ty_bits { debug!{"type: bits"}; }
ty_hex(cs) {
alt cs {
case_upper { debug!{"type: uhex"}; }
case_lower { debug!{"type: lhex"}; }
}
ty_bits => debug!{"type: bits"},
ty_hex(cs) => alt cs {
case_upper => debug!{"type: uhex"},
case_lower => debug!{"type: lhex"},
}
ty_octal { debug!{"type: octal"}; }
ty_float { debug!{"type: float"}; }
ty_poly { debug!{"type: poly"}; }
ty_octal => debug!{"type: octal"},
ty_float => debug!{"type: float"},
ty_poly => debug!{"type: poly"}
}
}
let fmt_sp = args[0].span;
@ -254,10 +241,10 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
let nargs = args.len();
for pieces.each |pc| {
alt pc {
piece_string(s) {
vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s));
piece_string(s) => {
vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s))
}
piece_conv(conv) {
piece_conv(conv) => {
n += 1u;
if n >= nargs {
cx.span_fatal(sp,

View file

@ -45,7 +45,7 @@ impl proto_check of proto::visitor<(), (), ()> for ext_ctxt {
fn visit_message(name: ident, _span: span, _tys: &[@ast::ty],
this: state, next: next_state) {
alt next {
some({state: next, tys: next_tys}) {
some({state: next, tys: next_tys}) => {
let proto = this.proto;
if !proto.has_state(next) {
// This should be a span fatal, but then we need to
@ -69,7 +69,7 @@ impl proto_check of proto::visitor<(), (), ()> for ext_ctxt {
}
}
}
none { }
none => ()
}
}
}

View file

@ -26,16 +26,14 @@ impl proto_parser of proto_parser for parser {
let id = self.parse_ident();
self.expect(token::COLON);
let dir = alt copy self.token {
token::IDENT(n, _) {
self.get_str(n)
}
_ { fail }
token::IDENT(n, _) => self.get_str(n),
_ => fail
};
self.bump();
let dir = alt dir {
@~"send" { send }
@~"recv" { recv }
_ { fail }
@~"send" => send,
@~"recv" => recv,
_ => fail
};
let typarms = if self.token == token::LT {
@ -67,7 +65,7 @@ impl proto_parser of proto_parser for parser {
self.expect(token::RARROW);
let next = alt copy self.token {
token::IDENT(_, _) {
token::IDENT(_, _) => {
let name = self.parse_ident();
let ntys = if self.token == token::LT {
self.parse_unspanned_seq(token::LT,
@ -79,12 +77,12 @@ impl proto_parser of proto_parser for parser {
else { ~[] };
some({state: name, tys: ntys})
}
token::NOT {
token::NOT => {
// -> !
self.bump();
none
}
_ { self.fatal(~"invalid next state") }
_ => self.fatal(~"invalid next state")
};
state.add_message(mname, copy self.span, args, next);

View file

@ -48,7 +48,8 @@ impl compile of gen_send for message {
fn gen_send(cx: ext_ctxt) -> @ast::item {
debug!{"pipec: gen_send"};
alt self {
message(id, span, tys, this, some({state: next, tys: next_tys})) {
message(id, span, tys, this,
some({state: next, tys: next_tys})) => {
debug!{"pipec: next state exists"};
let next = this.proto.get_state(next);
assert next_tys.len() == next.ty_params.len();
@ -71,10 +72,10 @@ impl compile of gen_send for message {
if this.proto.is_bounded() {
let (sp, rp) = alt (this.dir, next.dir) {
(send, send) { (~"c", ~"s") }
(send, recv) { (~"s", ~"c") }
(recv, send) { (~"s", ~"c") }
(recv, recv) { (~"c", ~"s") }
(send, send) => (~"c", ~"s"),
(send, recv) => (~"s", ~"c"),
(recv, send) => (~"s", ~"c"),
(recv, recv) => (~"c", ~"s")
};
body += ~"let b = pipe.reuse_buffer();\n";
@ -87,10 +88,10 @@ impl compile of gen_send for message {
}
else {
let pat = alt (this.dir, next.dir) {
(send, send) { ~"(c, s)" }
(send, recv) { ~"(s, c)" }
(recv, send) { ~"(s, c)" }
(recv, recv) { ~"(c, s)" }
(send, send) => ~"(c, s)",
(send, recv) => ~"(s, c)",
(recv, send) => ~"(s, c)",
(recv, recv) => ~"(c, s)"
};
body += fmt!{"let %s = pipes::entangle();\n", pat};
@ -116,7 +117,7 @@ impl compile of gen_send for message {
cx.expr_block(body))
}
message(id, span, tys, this, none) {
message(id, span, tys, this, none) => {
debug!{"pipec: no next state"};
let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str()));
@ -181,20 +182,20 @@ impl compile of to_type_decls for state {
let message(name, _span, tys, this, next) = m;
let tys = alt next {
some({state: next, tys: next_tys}) {
some({state: next, tys: next_tys}) => {
let next = this.proto.get_state(next);
let next_name = next.data_name();
let dir = alt this.dir {
send { @~"server" }
recv { @~"client" }
send => @~"server",
recv => @~"client"
};
vec::append_one(tys,
cx.ty_path_ast_builder((dir + next_name)
.add_tys(next_tys)))
}
none { tys }
none => tys
};
let v = cx.variant(name, tys);
@ -208,8 +209,8 @@ impl compile of to_type_decls for state {
fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item] {
debug!{"pipec: to_endpoint_decls"};
let dir = alt dir {
send { (*self).dir }
recv { (*self).dir.reverse() }
send => (*self).dir,
recv => (*self).dir.reverse()
};
let mut items = ~[];
for self.messages.each |m| {
@ -255,8 +256,8 @@ impl compile of gen_init for protocol {
let body = if !self.is_bounded() {
alt start_state.dir {
send { #ast { pipes::entangle() } }
recv {
send => #ast { pipes::entangle() },
recv => {
#ast {{
let (s, c) = pipes::entangle();
(c, s)
@ -267,8 +268,8 @@ impl compile of gen_init for protocol {
else {
let body = self.gen_init_bounded(ext_cx);
alt start_state.dir {
send { body }
recv {
send => body,
recv => {
#ast {{
let (s, c) = $(body);
(c, s)
@ -322,8 +323,8 @@ impl compile of gen_init for protocol {
for (copy self.states).each |s| {
for s.ty_params.each |tp| {
alt params.find(|tpp| *tp.ident == *tpp.ident) {
none { vec::push(params, tp) }
_ { }
none => vec::push(params, tp),
_ => ()
}
}
}
@ -338,8 +339,8 @@ impl compile of gen_init for protocol {
let fields = do (copy self.states).map_to_vec |s| {
for s.ty_params.each |tp| {
alt params.find(|tpp| *tp.ident == *tpp.ident) {
none { vec::push(params, tp) }
_ { }
none => vec::push(params, tp),
_ => ()
}
}
let ty = s.to_ty(cx);
@ -439,8 +440,8 @@ impl parse_utils of ext_ctxt_parse_utils for ext_ctxt {
~[],
self.parse_sess());
alt res {
some(ast) { ast }
none {
some(ast) => ast,
none => {
error!{"Parse error with ```\n%s\n```", s};
fail
}

View file

@ -12,8 +12,8 @@ enum direction {
impl of to_str for direction {
fn to_str() -> ~str {
alt self {
send { ~"send" }
recv { ~"recv" }
send => ~"send",
recv => ~"recv"
}
}
}
@ -21,8 +21,8 @@ impl of to_str for direction {
impl methods for direction {
fn reverse() -> direction {
alt self {
send { recv }
recv { send }
send => recv,
recv => send
}
}
}
@ -37,26 +37,20 @@ enum message {
impl methods for message {
fn name() -> ident {
alt self {
message(id, _, _, _, _) {
id
}
message(id, _, _, _, _) => id
}
}
fn span() -> span {
alt self {
message(_, span, _, _, _) {
span
}
message(_, span, _, _, _) => span
}
}
/// Return the type parameters actually used by this message
fn get_params() -> ~[ast::ty_param] {
alt self {
message(_, _, _, this, _) {
this.ty_params
}
message(_, _, _, this, _) => this.ty_params
}
}
}
@ -99,11 +93,11 @@ impl methods for state {
fn reachable(f: fn(state) -> bool) {
for self.messages.each |m| {
alt m {
message(_, _, _, _, some({state: id, _})) {
message(_, _, _, _, some({state: id, _})) => {
let state = self.proto.get_state(id);
if !f(state) { break }
}
_ { }
_ => ()
}
}
}

View file

@ -49,8 +49,8 @@ impl of qq_helper for @ast::expr {
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {
alt (self.node) {
ast::expr_mac({node: mac, _}) {some(mac)}
_ {none}
ast::expr_mac({node: mac, _}) => some(mac),
_ => none
}
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
@ -64,8 +64,8 @@ impl of qq_helper for @ast::ty {
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {
alt (self.node) {
ast::ty_mac({node: mac, _}) {some(mac)}
_ {none}
ast::ty_mac({node: mac, _}) => some(mac),
_ => none
}
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
@ -125,14 +125,14 @@ fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
fn visit_aq<T:qq_helper>(node: T, constr: ~str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
{
alt (node.extract_mac()) {
some(mac_aq(sp, e)) {
some(mac_aq(sp, e)) => {
cx.gather.push(gather_item {
lo: sp.lo - cx.lo,
hi: sp.hi - cx.lo,
e: e,
constr: constr});
}
_ {node.visit(cx, v);}
_ => node.visit(cx, v)
}
}
@ -148,8 +148,8 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
do option::iter(arg) |arg| {
let args: ~[@ast::expr] =
alt arg.node {
ast::expr_vec(elts, _) { elts }
_ {
ast::expr_vec(elts, _) => elts,
_ => {
ecx.span_fatal
(_sp, ~"#ast requires arguments of the form `~[...]`.")
}
@ -159,20 +159,20 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
}
alt (args[0].node) {
ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u
{what = *id[0]}
_ {ecx.span_fatal(args[0].span, ~"expected an identifier");}
=> what = *id[0],
_ => ecx.span_fatal(args[0].span, ~"expected an identifier")
}
}
let body = get_mac_body(ecx,_sp,body);
return alt what {
~"crate" {finish(ecx, body, parse_crate)}
~"expr" {finish(ecx, body, parse_expr)}
~"ty" {finish(ecx, body, parse_ty)}
~"item" {finish(ecx, body, parse_item)}
~"stmt" {finish(ecx, body, parse_stmt)}
~"pat" {finish(ecx, body, parse_pat)}
_ {ecx.span_fatal(_sp, ~"unsupported ast type")}
~"crate" => finish(ecx, body, parse_crate),
~"expr" => finish(ecx, body, parse_expr),
~"ty" => finish(ecx, body, parse_ty),
~"item" => finish(ecx, body, parse_item),
~"stmt" => finish(ecx, body, parse_stmt),
~"pat" => finish(ecx, body, parse_pat),
_ => ecx.span_fatal(_sp, ~"unsupported ast type")
};
}
@ -184,8 +184,8 @@ fn parse_pat(p: parser) -> @ast::pat { p.parse_pat(true) }
fn parse_item(p: parser) -> @ast::item {
alt p.parse_item(~[]) {
some(item) { item }
none { fail ~"parse_item: parsing an item failed"; }
some(item) => item,
none => fail ~"parse_item: parsing an item failed"
}
}
@ -226,11 +226,11 @@ fn finish<T: qq_helper>
str2 += repl;
}
alt copy state {
active {str::push_char(str2, ch);}
skip(1u) {state = blank;}
skip(sk) {state = skip (sk-1u);}
blank if is_space(ch) {str::push_char(str2, ch);}
blank {str::push_char(str2, ' ');}
active => str::push_char(str2, ch),
skip(1u) => state = blank,
skip(sk) => state = skip (sk-1u),
blank if is_space(ch) => str::push_char(str2, ch),
blank => str::push_char(str2, ' ')
}
i += 1u;
if (j < g_len && i == cx.gather[j].hi) {
@ -309,11 +309,11 @@ fn replace_expr(repls: ~[fragment],
-> (ast::expr_, span)
{
alt e {
ast::expr_mac({node: mac_var(i), _}) {
alt (repls[i]) {
from_expr(r) {(r.node, r.span)}
_ {fail /* fixme error message */}}}
_ {orig(e,s,fld)}
ast::expr_mac({node: mac_var(i), _}) => alt (repls[i]) {
from_expr(r) => (r.node, r.span),
_ => fail /* fixme error message */
}
_ => orig(e,s,fld)
}
}
@ -323,11 +323,11 @@ fn replace_ty(repls: ~[fragment],
-> (ast::ty_, span)
{
alt e {
ast::ty_mac({node: mac_var(i), _}) {
alt (repls[i]) {
from_ty(r) {(r.node, r.span)}
_ {fail /* fixme error message */}}}
_ {orig(e,s,fld)}
ast::ty_mac({node: mac_var(i), _}) => alt (repls[i]) {
from_ty(r) => (r.node, r.span),
_ => fail /* fixme error message */
}
_ => orig(e,s,fld)
}
}

View file

@ -37,27 +37,17 @@ enum matchable {
/* for when given an incompatible bit of AST */
fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! {
alt m {
match_expr(x) {
cx.span_fatal(x.span,
~"this argument is an expr, expected " + expected);
}
match_path(x) {
cx.span_fatal(x.span,
~"this argument is a path, expected " + expected);
}
match_ident(x) {
cx.span_fatal(x.span,
~"this argument is an ident, expected " + expected);
}
match_ty(x) {
cx.span_fatal(x.span,
~"this argument is a type, expected " + expected);
}
match_block(x) {
cx.span_fatal(x.span,
~"this argument is a block, expected " + expected);
}
match_exact { cx.bug(~"what is a match_exact doing in a bindings?"); }
match_expr(x) => cx.span_fatal(
x.span, ~"this argument is an expr, expected " + expected),
match_path(x) => cx.span_fatal(
x.span, ~"this argument is a path, expected " + expected),
match_ident(x) => cx.span_fatal(
x.span, ~"this argument is an ident, expected " + expected),
match_ty(x) => cx.span_fatal(
x.span, ~"this argument is a type, expected " + expected),
match_block(x) => cx.span_fatal(
x.span, ~"this argument is a block, expected " + expected),
match_exact => cx.bug(~"what is a match_exact doing in a bindings?")
}
}
@ -76,9 +66,8 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
let mut res = none;
for elts.each |elt| {
alt elt.node {
expr_mac(m) {
alt m.node {
ast::mac_ellipsis {
expr_mac(m) => alt m.node {
ast::mac_ellipsis => {
if res != none {
cx.span_fatal(m.span, ~"only one ellipsis allowed");
}
@ -86,37 +75,37 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
some({pre: vec::slice(elts, 0u, idx - 1u),
rep: some(elts[idx - 1u]),
post: vec::slice(elts, idx + 1u, vec::len(elts))});
}
_ { }
}
_ => ()
}
_ { }
_ => ()
}
idx += 1u;
}
return alt res {
some(val) { val }
none { {pre: elts, rep: none, post: ~[]} }
}
some(val) => val,
none => {pre: elts, rep: none, post: ~[]}
}
}
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: ~[T]) ->
option<~[U]> {
let mut res = ~[];
for v.each |elem| {
alt f(elem) { none { return none; } some(fv) { vec::push(res, fv); } }
alt f(elem) {
none => return none,
some(fv) => vec::push(res, fv)
}
}
return some(res);
}
fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
alt ad {
leaf(x) { return f(x); }
seq(ads, span) {
alt option_flatten_map(|x| a_d_map(x, f), *ads) {
none { return none; }
some(ts) { return some(seq(@ts, span)); }
}
leaf(x) => return f(x),
seq(ads, span) => alt option_flatten_map(|x| a_d_map(x, f), *ads) {
none => return none,
some(ts) => return some(seq(@ts, span))
}
}
}
@ -124,8 +113,8 @@ fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
fn compose_sels(s1: selector, s2: selector) -> selector {
fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
return alt s1(m) {
none { none }
some(matches) { a_d_map(matches, s2) }
none => none,
some(matches) => a_d_map(matches, s2)
}
}
return { |x| scomp(s1, s2, x) };
@ -167,13 +156,13 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
let res = box_str_hash::<arb_depth<matchable>>();
//need to do this first, to check vec lengths.
for b.literal_ast_matchers.each |sel| {
alt sel(match_expr(e)) { none { return none; } _ { } }
alt sel(match_expr(e)) { none => return none, _ => () }
}
let mut never_mind: bool = false;
for b.real_binders.each |key, val| {
alt val(match_expr(e)) {
none { never_mind = true; }
some(mtc) { res.insert(key, mtc); }
none => never_mind = true,
some(mtc) => { res.insert(key, mtc); }
}
};
//HACK: `ret` doesn't work in `for each`
@ -221,8 +210,8 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut ~[uint]) ->
let mut res: arb_depth<matchable> = m;
for vec::each(*idx_path) |idx| {
res = alt res {
leaf(_) { return res;/* end of the line */ }
seq(new_ms, _) { new_ms[idx] }
leaf(_) => return res,/* end of the line */
seq(new_ms, _) => new_ms[idx]
}
}
return res;
@ -231,15 +220,15 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut ~[uint]) ->
fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
idx_path: @mut ~[uint]) -> option<matchable> {
alt mmaybe {
none { return none }
some(m) {
none => return none,
some(m) => {
return alt follow(m, idx_path) {
seq(_, sp) {
seq(_, sp) => {
cx.span_fatal(sp,
~"syntax matched under ... but not " +
~"used that way.")
}
leaf(m) { return some(m) }
leaf(m) => return some(m)
}
}
}
@ -270,24 +259,24 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
recur: fn@(&&@expr) -> @expr,
exprs: ~[@expr]) -> ~[@expr] {
alt elts_to_ell(cx, exprs) {
{pre: pre, rep: repeat_me_maybe, post: post} {
{pre: pre, rep: repeat_me_maybe, post: post} => {
let mut res = vec::map(pre, recur);
alt repeat_me_maybe {
none { }
some(repeat_me) {
none => (),
some(repeat_me) => {
let mut repeat: option<{rep_count: uint, name: ident}> = none;
/* we need to walk over all the free vars in lockstep, except for
the leaves, which are just duplicated */
do free_vars(b, repeat_me) |fv| {
let cur_pos = follow(b.get(fv), idx_path);
alt cur_pos {
leaf(_) { }
seq(ms, _) {
leaf(_) => (),
seq(ms, _) => {
alt repeat {
none {
none => {
repeat = some({rep_count: vec::len(*ms), name: fv});
}
some({rep_count: old_len, name: old_name}) {
some({rep_count: old_len, name: old_name}) => {
let len = vec::len(*ms);
if old_len != len {
let msg =
@ -302,12 +291,12 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
}
};
alt repeat {
none {
none => {
cx.span_fatal(repeat_me.span,
~"'...' surrounds an expression without any" +
~" repeating syntax variables");
}
some({rep_count: rc, _}) {
some({rep_count: rc, _}) => {
/* Whew, we now know how how many times to repeat */
let mut idx: uint = 0u;
while idx < rc {
@ -332,9 +321,9 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
&&i: ident, _fld: ast_fold) -> ident {
return alt follow_for_trans(cx, b.find(i), idx_path) {
some(match_ident(a_id)) { a_id.node }
some(m) { match_error(cx, m, ~"an identifier") }
none { i }
some(match_ident(a_id)) => a_id.node,
some(m) => match_error(cx, m, ~"an identifier"),
none => i
}
}
@ -344,13 +333,13 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
// Don't substitute into qualified names.
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { return p; }
alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
some(match_ident(id)) {
some(match_ident(id)) => {
{span: id.span, global: false, idents: ~[id.node],
rp: none, types: ~[]}
}
some(match_path(a_pth)) { *a_pth }
some(m) { match_error(cx, m, ~"a path") }
none { p }
some(match_path(a_pth)) => *a_pth,
some(m) => match_error(cx, m, ~"a path"),
none => p
}
}
@ -361,26 +350,26 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
-> (ast::expr_, span)
{
return alt e {
expr_path(p) {
expr_path(p) => {
// Don't substitute into qualified names.
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
(e, s);
}
alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
some(match_ident(id)) {
some(match_ident(id)) => {
(expr_path(@{span: id.span,
global: false,
idents: ~[id.node],
rp: none,
types: ~[]}), id.span)
}
some(match_path(a_pth)) { (expr_path(a_pth), s) }
some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
some(m) { match_error(cx, m, ~"an expression") }
none { orig(e, s, fld) }
some(match_path(a_pth)) => (expr_path(a_pth), s),
some(match_expr(a_exp)) => (a_exp.node, a_exp.span),
some(m) => match_error(cx, m, ~"an expression"),
none => orig(e, s, fld)
}
}
_ { orig(e, s, fld) }
_ => orig(e, s, fld)
}
}
@ -390,19 +379,19 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
-> (ast::ty_, span)
{
return alt t {
ast::ty_path(pth, _) {
ast::ty_path(pth, _) => {
alt path_to_ident(pth) {
some(id) {
some(id) => {
alt follow_for_trans(cx, b.find(id), idx_path) {
some(match_ty(ty)) { (ty.node, ty.span) }
some(m) { match_error(cx, m, ~"a type") }
none { orig(t, s, fld) }
some(match_ty(ty)) => (ty.node, ty.span),
some(m) => match_error(cx, m, ~"a type"),
none => orig(t, s, fld)
}
}
none { orig(t, s, fld) }
none => orig(t, s, fld)
}
}
_ { orig(t, s, fld) }
_ => orig(t, s, fld)
}
}
@ -416,22 +405,16 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
-> (blk_, span)
{
return alt block_to_ident(blk) {
some(id) {
some(id) => {
alt follow_for_trans(cx, b.find(id), idx_path) {
some(match_block(new_blk)) { (new_blk.node, new_blk.span) }
some(match_block(new_blk)) => (new_blk.node, new_blk.span),
// possibly allow promotion of ident/path/expr to blocks?
some(m) {
match_error(cx, m, ~"a block")
}
none { orig(blk, s, fld) }
some(m) => match_error(cx, m, ~"a block"),
none => orig(blk, s, fld)
}
}
none { orig(blk, s, fld) }
none => orig(blk, s, fld)
}
}
@ -442,12 +425,12 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
//it might be possible to traverse only exprs, not matchables
alt m {
match_expr(e) {
match_expr(e) => {
alt e.node {
expr_path(p_pth) { p_t_s_r_path(cx, p_pth, s, b); }
expr_vec(p_elts, _) {
expr_path(p_pth) => p_t_s_r_path(cx, p_pth, s, b),
expr_vec(p_elts, _) => {
alt elts_to_ell(cx, p_elts) {
{pre: pre, rep: some(repeat_me), post: post} {
{pre: pre, rep: some(repeat_me), post: post} => {
p_t_s_r_length(cx, vec::len(pre) + vec::len(post), true, s,
b);
if vec::len(pre) > 0u {
@ -460,7 +443,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
~"matching after `...` not yet supported");
}
}
{pre: pre, rep: none, post: post} {
{pre: pre, rep: none, post: post} => {
if post != ~[] {
cx.bug(~"elts_to_ell provided an invalid result");
}
@ -470,26 +453,24 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
}
}
/* FIXME (#2251): handle embedded types and blocks, at least */
expr_mac(mac) {
expr_mac(mac) => {
p_t_s_r_mac(cx, mac, s, b);
}
_ {
_ => {
fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
match_result {
return alt m {
match_expr(e) {
match_expr(e) => {
if e == pat { some(leaf(match_exact)) } else { none }
}
_ { cx.bug(~"broken traversal in p_t_s_r") }
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
b.literal_ast_matchers.push(|x| select(cx, x, e));
}
}
}
_ {
cx.bug(~"undocumented invariant in p_t_s_rec");
}
_ => cx.bug(~"undocumented invariant in p_t_s_rec")
}
}
@ -497,29 +478,29 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
/* make a match more precise */
fn specialize_match(m: matchable) -> matchable {
return alt m {
match_expr(e) {
match_expr(e) => {
alt e.node {
expr_path(pth) {
expr_path(pth) => {
alt path_to_ident(pth) {
some(id) { match_ident(respan(pth.span, id)) }
none { match_path(pth) }
some(id) => match_ident(respan(pth.span, id)),
none => match_path(pth)
}
}
_ { m }
_ => m
}
}
_ { m }
_ => m
}
}
/* pattern_to_selectors helper functions */
fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
alt path_to_ident(p) {
some(p_id) {
some(p_id) => {
fn select(cx: ext_ctxt, m: matchable) -> match_result {
return alt m {
match_expr(e) { some(leaf(specialize_match(m))) }
_ { cx.bug(~"broken traversal in p_t_s_r") }
match_expr(e) => some(leaf(specialize_match(m))),
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
if b.real_binders.contains_key(p_id) {
@ -527,17 +508,18 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
}
b.real_binders.insert(p_id, compose_sels(s, |x| select(cx, x)));
}
none { }
none => ()
}
}
fn block_to_ident(blk: blk_) -> option<ident> {
if vec::len(blk.stmts) != 0u { return none; }
return alt blk.expr {
some(expr) {
alt expr.node { expr_path(pth) { path_to_ident(pth) } _ { none } }
some(expr) => alt expr.node {
expr_path(pth) => path_to_ident(pth),
_ => none
}
none { none }
none => none
}
}
@ -545,21 +527,22 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) {
fn select_pt_1(cx: ext_ctxt, m: matchable,
fn_m: fn(ast::mac) -> match_result) -> match_result {
return alt m {
match_expr(e) {
alt e.node { expr_mac(mac) { fn_m(mac) } _ { none } }
match_expr(e) => alt e.node {
expr_mac(mac) => fn_m(mac),
_ => none
}
_ { cx.bug(~"broken traversal in p_t_s_r") }
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
fn no_des(cx: ext_ctxt, sp: span, syn: ~str) -> ! {
cx.span_fatal(sp, ~"destructuring " + syn + ~" is not yet supported");
}
alt mac.node {
ast::mac_ellipsis { cx.span_fatal(mac.span, ~"misused `...`"); }
ast::mac_invoc(_, _, _) { no_des(cx, mac.span, ~"macro calls"); }
ast::mac_invoc_tt(_, _) { no_des(cx, mac.span, ~"macro calls"); }
ast::mac_aq(_,_) { no_des(cx, mac.span, ~"antiquotes"); }
ast::mac_var(_) { no_des(cx, mac.span, ~"antiquote variables"); }
ast::mac_ellipsis => cx.span_fatal(mac.span, ~"misused `...`"),
ast::mac_invoc(_, _, _) => no_des(cx, mac.span, ~"macro calls"),
ast::mac_invoc_tt(_, _) => no_des(cx, mac.span, ~"macro calls"),
ast::mac_aq(_,_) => no_des(cx, mac.span, ~"antiquotes"),
ast::mac_var(_) => no_des(cx, mac.span, ~"antiquote variables")
}
}
@ -568,9 +551,9 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) ->
match_result {
return alt m {
match_expr(e) {
match_expr(e) => {
alt e.node {
expr_vec(arg_elts, _) {
expr_vec(arg_elts, _) => {
let mut elts = ~[];
let mut idx = offset;
while idx < vec::len(arg_elts) {
@ -582,10 +565,10 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
// error we want to report is one in the macro def
some(seq(@elts, repeat_me.span))
}
_ { none }
_ => none
}
}
_ { cx.bug(~"broken traversal in p_t_s_r") }
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
p_t_s_rec(cx, match_expr(repeat_me),
@ -598,18 +581,18 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
match_result {
return alt m {
match_expr(e) {
match_expr(e) => {
alt e.node {
expr_vec(arg_elts, _) {
expr_vec(arg_elts, _) => {
let actual_len = vec::len(arg_elts);
if at_least && actual_len >= len || actual_len == len {
some(leaf(match_exact))
} else { none }
}
_ { none }
_ => none
}
}
_ { none }
_ => none
}
}
b.literal_ast_matchers.push(
@ -622,15 +605,15 @@ fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool,
while idx < vec::len(elts) {
fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
return alt m {
match_expr(e) {
match_expr(e) => {
alt e.node {
expr_vec(arg_elts, _) {
expr_vec(arg_elts, _) => {
some(leaf(match_expr(arg_elts[idx])))
}
_ { none }
_ => none
}
}
_ { cx.bug(~"broken traversal in p_t_s_r") }
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
p_t_s_rec(cx, match_expr(elts[idx]),
@ -647,7 +630,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
let mut clauses: ~[@clause] = ~[];
for args.each |arg| {
alt arg.node {
expr_vec(elts, mutbl) {
expr_vec(elts, mutbl) => {
if vec::len(elts) != 2u {
cx.span_fatal((*arg).span,
~"extension clause must consist of ~[" +
@ -656,31 +639,25 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
alt elts[0u].node {
expr_mac(mac) {
expr_mac(mac) => {
alt mac.node {
mac_invoc(pth, invoc_arg, body) {
mac_invoc(pth, invoc_arg, body) => {
alt path_to_ident(pth) {
some(id) {
alt macro_name {
none { macro_name = some(id); }
some(other_id) {
if id != other_id {
cx.span_fatal(pth.span,
~"macro name must be " +
~"consistent");
}
}
some(id) => alt macro_name {
none => macro_name = some(id),
some(other_id) => if id != other_id {
cx.span_fatal(pth.span,
~"macro name must be " +
~"consistent");
}
}
none {
cx.span_fatal(pth.span,
~"macro name must not be a path");
}
none => cx.span_fatal(pth.span,
~"macro name must not be a path")
}
let arg = alt invoc_arg {
some(arg) { arg }
none { cx.span_fatal(mac.span,
~"macro must have arguments")}
some(arg) => arg,
none => cx.span_fatal(mac.span,
~"macro must have arguments")
};
vec::push(clauses,
@{params: pattern_to_selectors(cx, arg),
@ -689,20 +666,20 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
// FIXME (#2251): check duplicates (or just simplify
// the macro arg situation)
}
_ {
_ => {
cx.span_bug(mac.span, ~"undocumented invariant in \
add_extension");
}
}
}
_ {
_ => {
cx.span_fatal(elts[0u].span,
~"extension clause must" +
~" start with a macro invocation.");
}
}
}
_ {
_ => {
cx.span_fatal((*arg).span,
~"extension must be ~[clause, " + ~" ...]");
}
@ -713,11 +690,9 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
return {ident:
alt macro_name {
some(id) { id }
none {
cx.span_fatal(sp, ~"macro definition must have " +
~"at least one clause")
}
some(id) => id,
none => cx.span_fatal(sp, ~"macro definition must have " +
~"at least one clause")
},
ext: normal({expander: ext, span: some(option::get(arg).span)})};
@ -725,13 +700,13 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body,
clauses: ~[@clause]) -> @expr {
let arg = alt arg {
some(arg) { arg }
none { cx.span_fatal(sp, ~"macro must have arguments")}
some(arg) => arg,
none => cx.span_fatal(sp, ~"macro must have arguments")
};
for clauses.each |c| {
alt use_selectors_to_bind(c.params, arg) {
some(bindings) { return transcribe(cx, bindings, c.body); }
none { again; }
some(bindings) => return transcribe(cx, bindings, c.body),
none => again
}
}
cx.span_fatal(sp, ~"no clauses match macro invocation");

View file

@ -71,8 +71,8 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
let res = io::read_whole_file_str(res_rel_file(cx, sp, file));
alt res {
result::ok(_) { /* Continue. */ }
result::err(e) {
result::ok(_) => { /* Continue. */ }
result::err(e) => {
cx.parse_sess().span_diagnostic.handler().fatal(e);
}
}
@ -87,13 +87,13 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
let file = expr_to_str(cx, args[0], ~"#include_bin requires a string");
alt io::read_whole_file(res_rel_file(cx, sp, file)) {
result::ok(src) {
result::ok(src) => {
let u8_exprs = vec::map(src, |char: u8| {
mk_u8(cx, sp, char)
});
return mk_uniq_vec_e(cx, sp, u8_exprs);
}
result::err(e) {
result::err(e) => {
cx.parse_sess().span_diagnostic.handler().fatal(e)
}
}

View file

@ -32,8 +32,8 @@ enum matcher_pos_up { /* to break a circularity */
fn is_some(&&mpu: matcher_pos_up) -> bool {
alt mpu {
matcher_pos_up(none) { false }
_ { true }
matcher_pos_up(none) => false,
_ => true
}
}
@ -49,17 +49,17 @@ type matcher_pos = ~{
fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
alt mpu {
matcher_pos_up(some(mp)) { copy mp }
_ { fail }
matcher_pos_up(some(mp)) => copy mp,
_ => fail
}
}
fn count_names(ms: &[matcher]) -> uint {
vec::foldl(0u, ms, |ct, m| {
ct + alt m.node {
match_tok(_) { 0u }
match_seq(more_ms, _, _, _, _) { count_names(more_ms) }
match_nonterminal(_,_,_) { 1u }
match_tok(_) => 0u,
match_seq(more_ms, _, _, _, _) => count_names(more_ms),
match_nonterminal(_,_,_) => 1u
}})
}
@ -69,11 +69,11 @@ fn initial_matcher_pos(ms: ~[matcher], sep: option<token>, lo: uint)
let mut match_idx_hi = 0u;
for ms.each() |elt| {
alt elt.node {
match_tok(_) {}
match_seq(_,_,_,_,hi) {
match_tok(_) => (),
match_seq(_,_,_,_,hi) => {
match_idx_hi = hi; // it is monotonic...
}
match_nonterminal(_,_,pos) {
match_nonterminal(_,_,pos) => {
match_idx_hi = pos+1u; // ...so latest is highest
}
}
@ -114,11 +114,11 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match])
fn n_rec(p_s: parse_sess, m: matcher, res: ~[@named_match],
ret_val: hashmap<ident, @named_match>) {
alt m {
{node: match_tok(_), span: _} { }
{node: match_seq(more_ms, _, _, _, _), span: _} {
{node: match_tok(_), span: _} => (),
{node: match_seq(more_ms, _, _, _, _), span: _} => {
for more_ms.each() |next_m| { n_rec(p_s, next_m, res, ret_val) };
}
{node: match_nonterminal(bind_name, _, idx), span: sp} {
{node: match_nonterminal(bind_name, _, idx), span: sp} => {
if ret_val.contains_key(bind_name) {
p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "
+ *bind_name)
@ -140,10 +140,8 @@ enum parse_result {
fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
ms: ~[matcher]) -> hashmap<ident, @named_match> {
alt parse(sess, cfg, rdr, ms) {
success(m) { m }
failure(sp, str) {
sess.span_diagnostic.span_fatal(sp, str);
}
success(m) => m,
failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
}
}
@ -205,14 +203,14 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
// the *_t vars are workarounds for the lack of unary move
alt copy ei.sep {
some(t) if idx == len { // we need a separator
some(t) if idx == len => { // we need a separator
if tok == t { //pass the separator
let ei_t <- ei;
ei_t.idx += 1u;
vec::push(next_eis, ei_t);
}
}
_ { // we don't need a separator
_ => { // we don't need a separator
let ei_t <- ei;
ei_t.idx = 0u;
vec::push(cur_eis, ei_t);
@ -225,7 +223,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
alt copy ei.elts[idx].node {
/* need to descend into sequence */
match_seq(matchers, sep, zero_ok,
match_idx_lo, match_idx_hi){
match_idx_lo, match_idx_hi) => {
if zero_ok {
let new_ei = copy ei;
new_ei.idx += 1u;
@ -248,8 +246,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
sp_lo: sp.lo
});
}
match_nonterminal(_,_,_) { vec::push(bb_eis, ei) }
match_tok(t) {
match_nonterminal(_,_,_) => { vec::push(bb_eis, ei) }
match_tok(t) => {
let ei_t <- ei;
if t == tok { ei_t.idx += 1u; vec::push(next_eis, ei_t)}
}
@ -273,10 +271,11 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
|| bb_eis.len() > 1u {
let nts = str::connect(vec::map(bb_eis, |ei| {
alt ei.elts[ei.idx].node {
match_nonterminal(bind,name,_) {
match_nonterminal(bind,name,_) => {
fmt!{"%s ('%s')", *name, *bind}
}
_ { fail; } } }), ~" or ");
_ => fail
} }), ~" or ");
return failure(sp, fmt!{
"Local ambiguity: multiple parsing options: \
built-in NTs %s or %u other options.",
@ -295,12 +294,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
let ei = vec::pop(bb_eis);
alt ei.elts[ei.idx].node {
match_nonterminal(_, name, idx) {
match_nonterminal(_, name, idx) => {
ei.matches[idx].push(@matched_nonterminal(
parse_nt(rust_parser, *name)));
ei.idx += 1u;
}
_ { fail; }
_ => fail
}
vec::push(cur_eis,ei);
@ -320,30 +319,30 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
fn parse_nt(p: parser, name: ~str) -> nonterminal {
alt name {
~"item" { alt p.parse_item(~[]) {
some(i) { token::nt_item(i) }
none { p.fatal(~"expected an item keyword") }
}}
~"block" { token::nt_block(p.parse_block()) }
~"stmt" { token::nt_stmt(p.parse_stmt(~[])) }
~"pat" { token::nt_pat(p.parse_pat(true)) }
~"expr" { token::nt_expr(p.parse_expr()) }
~"ty" { token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)) }
~"item" => alt p.parse_item(~[]) {
some(i) => token::nt_item(i),
none => p.fatal(~"expected an item keyword")
}
~"block" => token::nt_block(p.parse_block()),
~"stmt" => token::nt_stmt(p.parse_stmt(~[])),
~"pat" => token::nt_pat(p.parse_pat(true)),
~"expr" => token::nt_expr(p.parse_expr()),
~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)),
// this could be handled like a token, since it is one
~"ident" { alt copy p.token {
token::IDENT(sn,b) { p.bump(); token::nt_ident(sn,b) }
_ { p.fatal(~"expected ident, found "
+ token::to_str(*p.reader.interner(), copy p.token)) }
} }
~"path" { token::nt_path(p.parse_path_with_tps(false)) }
~"tt" {
~"ident" => alt copy p.token {
token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
_ => p.fatal(~"expected ident, found "
+ token::to_str(*p.reader.interner(), copy p.token))
}
~"path" => token::nt_path(p.parse_path_with_tps(false)),
~"tt" => {
p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
let res = token::nt_tt(@p.parse_token_tree());
p.quote_depth -= 1u;
res
}
~"matchers" { token::nt_matchers(p.parse_matchers()) }
_ { p.fatal(~"Unsupported builtin nonterminal parser: " + name)}
~"matchers" => token::nt_matchers(p.parse_matchers()),
_ => p.fatal(~"Unsupported builtin nonterminal parser: " + name)
}
}

View file

@ -38,12 +38,12 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
// Extract the arguments:
let lhses:~[@named_match] = alt argument_map.get(@~"lhs") {
@matched_seq(s, sp) { s }
_ { cx.span_bug(sp, ~"wrong-structured lhs") }
@matched_seq(s, sp) => s,
_ => cx.span_bug(sp, ~"wrong-structured lhs")
};
let rhses:~[@named_match] = alt argument_map.get(@~"rhs") {
@matched_seq(s, sp) { s }
_ { cx.span_bug(sp, ~"wrong-structured rhs") }
@matched_seq(s, sp) => s,
_ => cx.span_bug(sp, ~"wrong-structured rhs")
};
// Given `lhses` and `rhses`, this is the new macro we create
@ -59,14 +59,14 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
for lhses.eachi() |i, lhs| { // try each arm's matchers
alt lhs {
@matched_nonterminal(nt_matchers(mtcs)) {
@matched_nonterminal(nt_matchers(mtcs)) => {
// `none` is because we're not interpolating
let arg_rdr = new_tt_reader(s_d, itr, none, arg) as reader;
alt parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs) {
success(named_matches) {
success(named_matches) => {
let rhs = alt rhses[i] { // okay, what's your transcriber?
@matched_nonterminal(nt_tt(@tt)) { tt }
_ { cx.span_bug(sp, ~"bad thing in rhs") }
@matched_nonterminal(nt_tt(@tt)) => tt,
_ => cx.span_bug(sp, ~"bad thing in rhs")
};
// rhs has holes ( `$id` and `$(...)` that need filled)
let trncbr = new_tt_reader(s_d, itr, some(named_matches),
@ -75,14 +75,13 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
trncbr as reader, SOURCE_FILE);
return mr_expr(p.parse_expr());
}
failure(sp, msg) {
if sp.lo >= best_fail_spot.lo {
best_fail_spot = sp; best_fail_msg = msg;
}
failure(sp, msg) => if sp.lo >= best_fail_spot.lo {
best_fail_spot = sp;
best_fail_msg = msg;
}
}
}
_ { cx.bug(~"non-matcher found in parsed lhses"); }
_ => cx.bug(~"non-matcher found in parsed lhses")
}
}
cx.span_fatal(best_fail_spot, best_fail_msg);

View file

@ -47,8 +47,8 @@ fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>,
mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false,
sep: none, up: tt_frame_up(option::none)},
interpolations: alt interp { /* just a convienience */
none { std::map::box_str_hash::<@named_match>() }
some(x) { x }
none => std::map::box_str_hash::<@named_match>(),
some(x) => x
},
mut repeat_idx: ~[mut], mut repeat_len: ~[],
/* dummy values, never read: */
@ -62,10 +62,10 @@ fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>,
pure fn dup_tt_frame(&&f: tt_frame) -> tt_frame {
@{readme: f.readme, mut idx: f.idx, dotdotdoted: f.dotdotdoted,
sep: f.sep, up: alt f.up {
tt_frame_up(some(up_frame)) {
tt_frame_up(some(up_frame)) => {
tt_frame_up(some(dup_tt_frame(up_frame)))
}
tt_frame_up(none) { tt_frame_up(none) }
tt_frame_up(none) => tt_frame_up(none)
}
}
}
@ -83,11 +83,11 @@ pure fn lookup_cur_matched_by_matched(r: tt_reader,
start: @named_match) -> @named_match {
pure fn red(&&ad: @named_match, &&idx: uint) -> @named_match {
alt *ad {
matched_nonterminal(_) {
matched_nonterminal(_) => {
// end of the line; duplicate henceforth
ad
}
matched_seq(ads, _) { ads[idx] }
matched_seq(ads, _) => ads[idx]
}
}
vec::foldl(start, r.repeat_idx, red)
@ -103,33 +103,29 @@ enum lis {
fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis {
fn lis_merge(lhs: lis, rhs: lis) -> lis {
alt lhs {
lis_unconstrained { rhs }
lis_contradiction(_) { lhs }
lis_constraint(l_len, l_id) {
alt rhs {
lis_unconstrained { lhs }
lis_contradiction(_) { rhs }
lis_constraint(r_len, _) if l_len == r_len { lhs }
lis_constraint(r_len, r_id) {
lis_unconstrained => rhs,
lis_contradiction(_) => lhs,
lis_constraint(l_len, l_id) => alt rhs {
lis_unconstrained => lhs,
lis_contradiction(_) => rhs,
lis_constraint(r_len, _) if l_len == r_len => lhs,
lis_constraint(r_len, r_id) => {
lis_contradiction(fmt!{"Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u",
*l_id, l_len, *r_id, r_len})
}
'%s' has %u items, but '%s' has %u",
*l_id, l_len, *r_id, r_len})
}
}
}
}
alt t {
tt_delim(tts) | tt_seq(_, tts, _, _) {
tt_delim(tts) | tt_seq(_, tts, _, _) => {
vec::foldl(lis_unconstrained, tts, {|lis, tt|
lis_merge(lis, lockstep_iter_size(tt, r)) })
}
tt_tok(*) { lis_unconstrained }
tt_nonterminal(_, name) {
alt *lookup_cur_matched(r, name) {
matched_nonterminal(_) { lis_unconstrained }
matched_seq(ads, _) { lis_constraint(ads.len(), name) }
}
tt_tok(*) => lis_unconstrained,
tt_nonterminal(_, name) => alt *lookup_cur_matched(r, name) {
matched_nonterminal(_) => lis_unconstrained,
matched_seq(ads, _) => lis_constraint(ads.len(), name)
}
}
}
@ -143,11 +139,11 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
|| r.repeat_idx.last() == r.repeat_len.last() - 1 {
alt r.cur.up {
tt_frame_up(none) {
tt_frame_up(none) => {
r.cur_tok = EOF;
return ret_val;
}
tt_frame_up(some(tt_f)) {
tt_frame_up(some(tt_f)) => {
if r.cur.dotdotdoted {
vec::pop(r.repeat_idx); vec::pop(r.repeat_len);
}
@ -161,40 +157,40 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
r.cur.idx = 0u;
r.repeat_idx[r.repeat_idx.len() - 1u] += 1u;
alt r.cur.sep {
some(tk) {
some(tk) => {
r.cur_tok = tk; /* repeat same span, I guess */
return ret_val;
}
none {}
none => ()
}
}
}
loop { /* because it's easiest, this handles `tt_delim` not starting
with a `tt_tok`, even though it won't happen */
alt r.cur.readme[r.cur.idx] {
tt_delim(tts) {
tt_delim(tts) => {
r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: false,
sep: none, up: tt_frame_up(option::some(r.cur)) };
// if this could be 0-length, we'd need to potentially recur here
}
tt_tok(sp, tok) {
tt_tok(sp, tok) => {
r.cur_span = sp; r.cur_tok = tok;
r.cur.idx += 1u;
return ret_val;
}
tt_seq(sp, tts, sep, zerok) {
tt_seq(sp, tts, sep, zerok) => {
alt lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) {
lis_unconstrained {
lis_unconstrained => {
r.sp_diag.span_fatal(
sp, /* blame macro writer */
~"attempted to repeat an expression containing no syntax \
variables matched as repeating at this depth");
}
lis_contradiction(msg) { /* FIXME #2887 blame macro invoker
lis_contradiction(msg) => { /* FIXME #2887 blame macro invoker
instead*/
r.sp_diag.span_fatal(sp, msg);
}
lis_constraint(len, _) {
lis_constraint(len, _) => {
if len == 0 {
if !zerok {
r.sp_diag.span_fatal(sp, /* FIXME #2887 blame invoker
@ -215,22 +211,22 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
}
}
// FIXME #2887: think about span stuff here
tt_nonterminal(sp, ident) {
tt_nonterminal(sp, ident) => {
alt *lookup_cur_matched(r, ident) {
/* sidestep the interpolation tricks for ident because
(a) idents can be in lots of places, so it'd be a pain
(b) we actually can, since it's a token. */
matched_nonterminal(nt_ident(sn,b)) {
matched_nonterminal(nt_ident(sn,b)) => {
r.cur_span = sp; r.cur_tok = IDENT(sn,b);
r.cur.idx += 1u;
return ret_val;
}
matched_nonterminal(other_whole_nt) {
matched_nonterminal(other_whole_nt) => {
r.cur_span = sp; r.cur_tok = INTERPOLATED(other_whole_nt);
r.cur.idx += 1u;
return ret_val;
}
matched_seq(*) {
matched_seq(*) => {
r.sp_diag.span_fatal(
copy r.cur_span, /* blame the macro writer */
fmt!{"variable '%s' is still repeating at this depth",

View file

@ -81,13 +81,13 @@ type ast_fold_precursor = @{
fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item {
return @{node:
alt mi.node {
meta_word(id) { meta_word(fld.fold_ident(id)) }
meta_list(id, mis) {
meta_word(id) => meta_word(fld.fold_ident(id)),
meta_list(id, mis) => {
let fold_meta_item = |x|fold_meta_item_(x, fld);
meta_list(/* FIXME: (#2543) */ copy id,
vec::map(mis, fold_meta_item))
}
meta_name_value(id, s) {
meta_name_value(id, s) => {
meta_name_value(fld.fold_ident(id),
/* FIXME (#2543) */ copy s)
}
@ -113,14 +113,14 @@ fn fold_arg_(a: arg, fld: ast_fold) -> arg {
fn fold_mac_(m: mac, fld: ast_fold) -> mac {
return {node:
alt m.node {
mac_invoc(pth, arg, body) {
mac_invoc(pth, arg, body) => {
mac_invoc(fld.fold_path(pth),
option::map(arg, |x| fld.fold_expr(x)), body)
}
mac_invoc_tt(pth, tt) { m.node }
mac_ellipsis { mac_ellipsis }
mac_aq(_,_) { /* FIXME (#2543) */ copy m.node }
mac_var(_) { /* FIXME (#2543) */ copy m.node }
mac_invoc_tt(pth, tt) => m.node,
mac_ellipsis => mac_ellipsis,
mac_aq(_,_) => /* FIXME (#2543) */ copy m.node,
mac_var(_) => /* FIXME (#2543) */ copy m.node,
},
span: fld.new_span(m.span)};
}
@ -134,8 +134,8 @@ fn fold_fn_decl(decl: ast::fn_decl, fld: ast_fold) -> ast::fn_decl {
fn fold_ty_param_bound(tpb: ty_param_bound, fld: ast_fold) -> ty_param_bound {
alt tpb {
bound_copy | bound_send | bound_const | bound_owned { tpb }
bound_trait(ty) { bound_trait(fld.fold_ty(ty)) }
bound_copy | bound_send | bound_const | bound_owned => tpb,
bound_trait(ty) => bound_trait(fld.fold_ty(ty))
}
}
@ -164,16 +164,16 @@ fn noop_fold_crate(c: crate_, fld: ast_fold) -> crate_ {
fn noop_fold_crate_directive(cd: crate_directive_, fld: ast_fold) ->
crate_directive_ {
return alt cd {
cdir_src_mod(id, attrs) {
cdir_src_mod(id, attrs) => {
cdir_src_mod(fld.fold_ident(id), /* FIXME (#2543) */ copy attrs)
}
cdir_dir_mod(id, cds, attrs) {
cdir_dir_mod(id, cds, attrs) => {
cdir_dir_mod(fld.fold_ident(id),
vec::map(cds, |x| fld.fold_crate_directive(x)),
/* FIXME (#2543) */ copy attrs)
}
cdir_view_item(vi) { cdir_view_item(fld.fold_view_item(vi)) }
cdir_syntax(_) { copy cd }
cdir_view_item(vi) => cdir_view_item(fld.fold_view_item(vi)),
cdir_syntax(_) => copy cd
}
}
@ -191,7 +191,7 @@ fn noop_fold_foreign_item(&&ni: @foreign_item, fld: ast_fold)
attrs: vec::map(ni.attrs, fold_attribute),
node:
alt ni.node {
foreign_item_fn(fdec, typms) {
foreign_item_fn(fdec, typms) => {
foreign_item_fn({inputs: vec::map(fdec.inputs, fold_arg),
output: fld.fold_ty(fdec.output),
purity: fdec.purity,
@ -217,32 +217,32 @@ fn noop_fold_item(&&i: @item, fld: ast_fold) -> option<@item> {
fn noop_fold_class_item(&&ci: @class_member, fld: ast_fold)
-> @class_member {
@{node: alt ci.node {
instance_var(ident, t, cm, id, p) {
instance_var(ident, t, cm, id, p) => {
instance_var(/* FIXME (#2543) */ copy ident,
fld.fold_ty(t), cm, id, p)
}
class_method(m) { class_method(fld.fold_method(m)) }
class_method(m) => class_method(fld.fold_method(m))
},
span: ci.span}
}
fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
return alt i {
item_const(t, e) { item_const(fld.fold_ty(t), fld.fold_expr(e)) }
item_fn(decl, typms, body) {
item_const(t, e) => item_const(fld.fold_ty(t), fld.fold_expr(e)),
item_fn(decl, typms, body) => {
item_fn(fold_fn_decl(decl, fld),
fold_ty_params(typms, fld),
fld.fold_block(body))
}
item_mod(m) { item_mod(fld.fold_mod(m)) }
item_foreign_mod(nm) { item_foreign_mod(fld.fold_foreign_mod(nm)) }
item_ty(t, typms) { item_ty(fld.fold_ty(t),
fold_ty_params(typms, fld)) }
item_enum(variants, typms) {
item_mod(m) => item_mod(fld.fold_mod(m)),
item_foreign_mod(nm) => item_foreign_mod(fld.fold_foreign_mod(nm)),
item_ty(t, typms) => item_ty(fld.fold_ty(t),
fold_ty_params(typms, fld)),
item_enum(variants, typms) => {
item_enum(vec::map(variants, |x| fld.fold_variant(x)),
fold_ty_params(typms, fld))
}
item_class(typms, traits, items, m_ctor, m_dtor) {
item_class(typms, traits, items, m_ctor, m_dtor) => {
let resulting_optional_constructor;
alt m_ctor {
none => {
@ -273,18 +273,18 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
resulting_optional_constructor,
dtor)
}
item_impl(tps, ifce, ty, methods) {
item_impl(tps, ifce, ty, methods) => {
item_impl(fold_ty_params(tps, fld),
ifce.map(|p| fold_trait_ref(p, fld)),
fld.fold_ty(ty),
vec::map(methods, |x| fld.fold_method(x)))
}
item_trait(tps, traits, methods) {
item_trait(tps, traits, methods) => {
item_trait(fold_ty_params(tps, fld),
vec::map(traits, |p| fold_trait_ref(p, fld)),
/* FIXME (#2543) */ copy methods)
}
item_mac(m) {
item_mac(m) => {
// FIXME #2888: we might actually want to do something here.
item_mac(m)
}
@ -320,9 +320,9 @@ fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ {
fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ {
return alt s {
stmt_decl(d, nid) { stmt_decl(fld.fold_decl(d), fld.new_id(nid)) }
stmt_expr(e, nid) { stmt_expr(fld.fold_expr(e), fld.new_id(nid)) }
stmt_semi(e, nid) { stmt_semi(fld.fold_expr(e), fld.new_id(nid)) }
stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)),
stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)),
stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid))
};
}
@ -334,18 +334,18 @@ fn noop_fold_arm(a: arm, fld: ast_fold) -> arm {
fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
return alt p {
pat_wild { pat_wild }
pat_ident(binding_mode, pth, sub) {
pat_wild => pat_wild,
pat_ident(binding_mode, pth, sub) => {
pat_ident(binding_mode,
fld.fold_path(pth),
option::map(sub, |x| fld.fold_pat(x)))
}
pat_lit(e) { pat_lit(fld.fold_expr(e)) }
pat_enum(pth, pats) {
pat_lit(e) => pat_lit(fld.fold_expr(e)),
pat_enum(pth, pats) => {
pat_enum(fld.fold_path(pth), option::map(pats,
|pats| vec::map(pats, |x| fld.fold_pat(x))))
}
pat_rec(fields, etc) {
pat_rec(fields, etc) => {
let mut fs = ~[];
for fields.each |f| {
vec::push(fs,
@ -354,10 +354,10 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
}
pat_rec(fs, etc)
}
pat_tup(elts) { pat_tup(vec::map(elts, |x| fld.fold_pat(x))) }
pat_box(inner) { pat_box(fld.fold_pat(inner)) }
pat_uniq(inner) { pat_uniq(fld.fold_pat(inner)) }
pat_range(e1, e2) {
pat_tup(elts) => pat_tup(vec::map(elts, |x| fld.fold_pat(x))),
pat_box(inner) => pat_box(fld.fold_pat(inner)),
pat_uniq(inner) => pat_uniq(fld.fold_pat(inner)),
pat_range(e1, e2) => {
pat_range(fld.fold_expr(e1), fld.fold_expr(e2))
}
};
@ -365,12 +365,10 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
fn noop_fold_decl(d: decl_, fld: ast_fold) -> decl_ {
alt d {
decl_local(ls) { decl_local(vec::map(ls, |x| fld.fold_local(x))) }
decl_item(it) {
alt fld.fold_item(it) {
some(it_folded) { decl_item(it_folded) }
none { decl_local(~[]) }
}
decl_local(ls) => decl_local(vec::map(ls, |x| fld.fold_local(x))),
decl_item(it) => alt fld.fold_item(it) {
some(it_folded) => decl_item(it_folded),
none => decl_local(~[])
}
}
}
@ -396,91 +394,91 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
let fold_mac = |x| fold_mac_(x, fld);
return alt e {
expr_vstore(e, v) {
expr_vstore(e, v) => {
expr_vstore(fld.fold_expr(e), v)
}
expr_vec(exprs, mutt) {
expr_vec(exprs, mutt) => {
expr_vec(fld.map_exprs(|x| fld.fold_expr(x), exprs), mutt)
}
expr_repeat(expr, count, mutt) =>
expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt),
expr_rec(fields, maybe_expr) {
expr_rec(fields, maybe_expr) => {
expr_rec(vec::map(fields, fold_field),
option::map(maybe_expr, |x| fld.fold_expr(x)))
}
expr_tup(elts) { expr_tup(vec::map(elts, |x| fld.fold_expr(x))) }
expr_call(f, args, blk) {
expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(x))),
expr_call(f, args, blk) => {
expr_call(fld.fold_expr(f),
fld.map_exprs(|x| fld.fold_expr(x), args),
blk)
}
expr_binary(binop, lhs, rhs) {
expr_binary(binop, lhs, rhs) => {
expr_binary(binop, fld.fold_expr(lhs), fld.fold_expr(rhs))
}
expr_unary(binop, ohs) { expr_unary(binop, fld.fold_expr(ohs)) }
expr_loop_body(f) { expr_loop_body(fld.fold_expr(f)) }
expr_do_body(f) { expr_do_body(fld.fold_expr(f)) }
expr_lit(_) { copy e }
expr_cast(expr, ty) { expr_cast(fld.fold_expr(expr), ty) }
expr_addr_of(m, ohs) { expr_addr_of(m, fld.fold_expr(ohs)) }
expr_if(cond, tr, fl) {
expr_unary(binop, ohs) => expr_unary(binop, fld.fold_expr(ohs)),
expr_loop_body(f) => expr_loop_body(fld.fold_expr(f)),
expr_do_body(f) => expr_do_body(fld.fold_expr(f)),
expr_lit(_) => copy e,
expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty),
expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)),
expr_if(cond, tr, fl) => {
expr_if(fld.fold_expr(cond), fld.fold_block(tr),
option::map(fl, |x| fld.fold_expr(x)))
}
expr_while(cond, body) {
expr_while(cond, body) => {
expr_while(fld.fold_expr(cond), fld.fold_block(body))
}
expr_loop(body) {
expr_loop(body) => {
expr_loop(fld.fold_block(body))
}
expr_alt(expr, arms, mode) {
expr_alt(expr, arms, mode) => {
expr_alt(fld.fold_expr(expr),
vec::map(arms, |x| fld.fold_arm(x)), mode)
}
expr_fn(proto, decl, body, captures) {
expr_fn(proto, decl, body, captures) => {
expr_fn(proto, fold_fn_decl(decl, fld),
fld.fold_block(body),
@((*captures).map(|cap_item| {
@({id: fld.new_id((*cap_item).id)
with *cap_item})})))
}
expr_fn_block(decl, body, captures) {
expr_fn_block(decl, body, captures) => {
expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block(body),
@((*captures).map(|cap_item| {
@({id: fld.new_id((*cap_item).id)
with *cap_item})})))
}
expr_block(blk) { expr_block(fld.fold_block(blk)) }
expr_move(el, er) {
expr_block(blk) => expr_block(fld.fold_block(blk)),
expr_move(el, er) => {
expr_move(fld.fold_expr(el), fld.fold_expr(er))
}
expr_copy(e) { expr_copy(fld.fold_expr(e)) }
expr_unary_move(e) { expr_unary_move(fld.fold_expr(e)) }
expr_assign(el, er) {
expr_copy(e) => expr_copy(fld.fold_expr(e)),
expr_unary_move(e) => expr_unary_move(fld.fold_expr(e)),
expr_assign(el, er) => {
expr_assign(fld.fold_expr(el), fld.fold_expr(er))
}
expr_swap(el, er) {
expr_swap(el, er) => {
expr_swap(fld.fold_expr(el), fld.fold_expr(er))
}
expr_assign_op(op, el, er) {
expr_assign_op(op, el, er) => {
expr_assign_op(op, fld.fold_expr(el), fld.fold_expr(er))
}
expr_field(el, id, tys) {
expr_field(el, id, tys) => {
expr_field(fld.fold_expr(el), fld.fold_ident(id),
vec::map(tys, |x| fld.fold_ty(x)))
}
expr_index(el, er) {
expr_index(el, er) => {
expr_index(fld.fold_expr(el), fld.fold_expr(er))
}
expr_path(pth) { expr_path(fld.fold_path(pth)) }
expr_fail(e) { expr_fail(option::map(e, |x| fld.fold_expr(x))) }
expr_break | expr_again { copy e }
expr_ret(e) { expr_ret(option::map(e, |x| fld.fold_expr(x))) }
expr_log(i, lv, e) { expr_log(i, fld.fold_expr(lv),
fld.fold_expr(e)) }
expr_assert(e) { expr_assert(fld.fold_expr(e)) }
expr_mac(mac) { expr_mac(fold_mac(mac)) }
expr_struct(path, fields) {
expr_path(pth) => expr_path(fld.fold_path(pth)),
expr_fail(e) => expr_fail(option::map(e, |x| fld.fold_expr(x))),
expr_break | expr_again => copy e,
expr_ret(e) => expr_ret(option::map(e, |x| fld.fold_expr(x))),
expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv),
fld.fold_expr(e)),
expr_assert(e) => expr_assert(fld.fold_expr(e)),
expr_mac(mac) => expr_mac(fold_mac(mac)),
expr_struct(path, fields) => {
expr_struct(fld.fold_path(path), vec::map(fields, fold_field))
}
}
@ -497,18 +495,18 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ {
span: fld.new_span(f.span)}
}
alt t {
ty_nil | ty_bot | ty_infer {copy t}
ty_box(mt) {ty_box(fold_mt(mt, fld))}
ty_uniq(mt) {ty_uniq(fold_mt(mt, fld))}
ty_vec(mt) {ty_vec(fold_mt(mt, fld))}
ty_ptr(mt) {ty_ptr(fold_mt(mt, fld))}
ty_rptr(region, mt) {ty_rptr(region, fold_mt(mt, fld))}
ty_rec(fields) {ty_rec(vec::map(fields, |f| fold_field(f, fld)))}
ty_fn(proto, decl) {ty_fn(proto, fold_fn_decl(decl, fld))}
ty_tup(tys) {ty_tup(vec::map(tys, |ty| fld.fold_ty(ty)))}
ty_path(path, id) {ty_path(fld.fold_path(path), fld.new_id(id))}
ty_fixed_length(t, vs) {ty_fixed_length(fld.fold_ty(t), vs)}
ty_mac(mac) {ty_mac(fold_mac(mac))}
ty_nil | ty_bot | ty_infer => copy t,
ty_box(mt) => ty_box(fold_mt(mt, fld)),
ty_uniq(mt) => ty_uniq(fold_mt(mt, fld)),
ty_vec(mt) => ty_vec(fold_mt(mt, fld)),
ty_ptr(mt) => ty_ptr(fold_mt(mt, fld)),
ty_rptr(region, mt) => ty_rptr(region, fold_mt(mt, fld)),
ty_rec(fields) => ty_rec(vec::map(fields, |f| fold_field(f, fld))),
ty_fn(proto, decl) => ty_fn(proto, fold_fn_decl(decl, fld)),
ty_tup(tys) => ty_tup(vec::map(tys, |ty| fld.fold_ty(ty))),
ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)),
ty_fixed_length(t, vs) => ty_fixed_length(fld.fold_ty(t), vs),
ty_mac(mac) => ty_mac(fold_mac(mac))
}
}
@ -534,8 +532,8 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
let attrs = vec::map(v.attrs, fold_attribute);
let de = alt v.disr_expr {
some(e) {some(fld.fold_expr(e))}
none {none}
some(e) => some(fld.fold_expr(e)),
none => none
};
return {name: /* FIXME (#2543) */ copy v.name,
attrs: attrs,
@ -561,8 +559,8 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ {
pat: fld.fold_pat(l.pat),
init:
alt l.init {
option::none::<initializer> { l.init }
option::some::<initializer>(init) {
option::none::<initializer> => l.init,
option::some::<initializer>(init) => {
option::some::<initializer>({op: init.op,
expr: fld.fold_expr(init.expr)})
}
@ -636,11 +634,11 @@ impl of ast_fold for ast_fold_precursor {
}
fn fold_class_item(&&ci: @class_member) -> @class_member {
@{node: alt ci.node {
instance_var(nm, t, mt, id, p) {
instance_var(nm, t, mt, id, p) => {
instance_var(/* FIXME (#2543) */ copy nm,
(self as ast_fold).fold_ty(t), mt, id, p)
}
class_method(m) {
class_method(m) => {
class_method(self.fold_method(m, self as ast_fold))
}
}, span: self.new_span(ci.span)}

View file

@ -190,8 +190,8 @@ fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg,
(parser, string_reader) {
let res = io::read_whole_file_str(path);
alt res {
result::ok(_) { /* Continue. */ }
result::err(e) { sess.span_diagnostic.handler().fatal(e); }
result::ok(_) => { /* Continue. */ }
result::err(e) => sess.span_diagnostic.handler().fatal(e)
}
let src = @result::unwrap(res);
let filemap = codemap::new_filemap(path, src, sess.chpos, sess.byte_pos);

View file

@ -30,7 +30,7 @@ impl parser_attr of parser_attr for parser {
{
let expect_item_next = vec::is_not_empty(first_item_attrs);
alt self.token {
token::POUND {
token::POUND => {
let lo = self.span.lo;
if self.look_ahead(1u) == token::LBRACKET {
self.bump();
@ -46,12 +46,10 @@ impl parser_attr of parser_attr for parser {
return some(right(self.parse_syntax_ext_naked(lo)));
} else { return none; }
}
token::DOC_COMMENT(_) {
token::DOC_COMMENT(_) => {
return some(left(self.parse_outer_attributes()));
}
_ {
return none;
}
_ => return none
}
}
@ -60,13 +58,13 @@ impl parser_attr of parser_attr for parser {
let mut attrs: ~[ast::attribute] = ~[];
loop {
alt copy self.token {
token::POUND {
token::POUND => {
if self.look_ahead(1u) != token::LBRACKET {
break;
}
attrs += ~[self.parse_attribute(ast::attr_outer)];
}
token::DOC_COMMENT(s) {
token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr(
*self.get_str(s), self.span.lo, self.span.hi);
if attr.node.style != ast::attr_outer {
@ -75,9 +73,7 @@ impl parser_attr of parser_attr for parser {
attrs += ~[attr];
self.bump();
}
_ {
break;
}
_ => break
}
}
return attrs;
@ -111,7 +107,7 @@ impl parser_attr of parser_attr for parser {
let mut next_outer_attrs: ~[ast::attribute] = ~[];
loop {
alt copy self.token {
token::POUND {
token::POUND => {
if self.look_ahead(1u) != token::LBRACKET {
// This is an extension
break;
@ -130,7 +126,7 @@ impl parser_attr of parser_attr for parser {
break;
}
}
token::DOC_COMMENT(s) {
token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr(
*self.get_str(s), self.span.lo, self.span.hi);
self.bump();
@ -141,9 +137,7 @@ impl parser_attr of parser_attr for parser {
break;
}
}
_ {
break;
}
_ => break
}
}
return {inner: inner_attrs, next: next_outer_attrs};
@ -153,18 +147,18 @@ impl parser_attr of parser_attr for parser {
let lo = self.span.lo;
let ident = self.parse_ident();
alt self.token {
token::EQ {
token::EQ => {
self.bump();
let lit = self.parse_lit();
let mut hi = self.span.hi;
return @spanned(lo, hi, ast::meta_name_value(ident, lit));
}
token::LPAREN {
token::LPAREN => {
let inner_items = self.parse_meta_seq();
let mut hi = self.span.hi;
return @spanned(lo, hi, ast::meta_list(ident, inner_items));
}
_ {
_ => {
let mut hi = self.span.hi;
return @spanned(lo, hi, ast::meta_word(ident));
}
@ -178,8 +172,10 @@ impl parser_attr of parser_attr for parser {
}
fn parse_optional_meta() -> ~[@ast::meta_item] {
alt self.token { token::LPAREN { return self.parse_meta_seq(); }
_ { return ~[]; } }
alt self.token {
token::LPAREN => return self.parse_meta_seq(),
_ => return ~[]
}
}
}

View file

@ -8,25 +8,23 @@ fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
alt e.node {
ast::expr_if(_, _, _) | ast::expr_alt(_, _, _) | ast::expr_block(_)
| ast::expr_while(_, _) | ast::expr_loop(_)
| ast::expr_call(_, _, true) {
false
}
_ { true }
| ast::expr_call(_, _, true) => false,
_ => true
}
}
fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
alt stmt.node {
ast::stmt_decl(d, _) {
ast::stmt_decl(d, _) => {
return alt d.node {
ast::decl_local(_) { true }
ast::decl_item(_) { false }
ast::decl_local(_) => true,
ast::decl_item(_) => false
}
}
ast::stmt_expr(e, _) {
ast::stmt_expr(e, _) => {
return expr_requires_semi_to_be_stmt(e);
}
ast::stmt_semi(e, _) {
ast::stmt_semi(e, _) => {
return false;
}
}
@ -34,43 +32,38 @@ fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
fn need_parens(expr: @ast::expr, outer_prec: uint) -> bool {
alt expr.node {
ast::expr_binary(op, _, _) { operator_prec(op) < outer_prec }
ast::expr_cast(_, _) { parse::prec::as_prec < outer_prec }
ast::expr_binary(op, _, _) => operator_prec(op) < outer_prec,
ast::expr_cast(_, _) => parse::prec::as_prec < outer_prec,
// This may be too conservative in some cases
ast::expr_assign(_, _) { true }
ast::expr_move(_, _) { true }
ast::expr_swap(_, _) { true }
ast::expr_assign_op(_, _, _) { true }
ast::expr_ret(_) { true }
ast::expr_assert(_) { true }
ast::expr_log(_, _, _) { true }
_ { !parse::classify::expr_requires_semi_to_be_stmt(expr) }
ast::expr_assign(_, _) => true,
ast::expr_move(_, _) => true,
ast::expr_swap(_, _) => true,
ast::expr_assign_op(_, _, _) => true,
ast::expr_ret(_) => true,
ast::expr_assert(_) => true,
ast::expr_log(_, _, _) => true,
_ => !parse::classify::expr_requires_semi_to_be_stmt(expr)
}
}
fn ends_in_lit_int(ex: @ast::expr) -> bool {
alt ex.node {
ast::expr_lit(node) {
alt node {
@{node: ast::lit_int(_, ast::ty_i), _} |
@{node: ast::lit_int_unsuffixed(_), _}
{ true }
_ { false }
}
ast::expr_lit(node) => alt node {
@{node: ast::lit_int(_, ast::ty_i), _}
| @{node: ast::lit_int_unsuffixed(_), _} => true,
_ => false
}
ast::expr_binary(_, _, sub) | ast::expr_unary(_, sub) |
ast::expr_move(_, sub) | ast::expr_copy(sub) |
ast::expr_assign(_, sub) |
ast::expr_assign_op(_, _, sub) | ast::expr_swap(_, sub) |
ast::expr_log(_, _, sub) | ast::expr_assert(sub) {
ast::expr_log(_, _, sub) | ast::expr_assert(sub) => {
ends_in_lit_int(sub)
}
ast::expr_fail(osub) | ast::expr_ret(osub) {
alt osub {
some(ex) { ends_in_lit_int(ex) }
_ { false }
}
ast::expr_fail(osub) | ast::expr_ret(osub) => alt osub {
some(ex) => ends_in_lit_int(ex),
_ => false
}
_ { false }
_ => false
}
}

View file

@ -85,10 +85,10 @@ impl parser_common of parser_common for parser {
fn parse_ident() -> ast::ident {
alt copy self.token {
token::IDENT(i, _) { self.bump(); return self.get_str(i); }
token::INTERPOLATED(token::nt_ident(*)) { self.bug(
token::IDENT(i, _) => { self.bump(); return self.get_str(i); }
token::INTERPOLATED(token::nt_ident(*)) => { self.bug(
~"ident interpolation not converted to real token"); }
_ { self.fatal(~"expected ident, found `"
_ => { self.fatal(~"expected ident, found `"
+ token_to_str(self.reader, self.token)
+ ~"`"); }
}
@ -135,10 +135,10 @@ impl parser_common of parser_common for parser {
fn is_any_keyword(tok: token::token) -> bool {
alt tok {
token::IDENT(sid, false) {
token::IDENT(sid, false) => {
self.keywords.contains_key_ref(self.get_str(sid))
}
_ { false }
_ => false
}
}
@ -147,13 +147,13 @@ impl parser_common of parser_common for parser {
let mut bump = false;
let val = alt self.token {
token::IDENT(sid, false) {
token::IDENT(sid, false) => {
if word == *self.get_str(sid) {
bump = true;
true
} else { false }
}
_ { false }
_ => false
};
if bump { self.bump() }
val
@ -174,11 +174,11 @@ impl parser_common of parser_common for parser {
fn check_restricted_keywords() {
alt self.token {
token::IDENT(_, false) {
token::IDENT(_, false) => {
let w = token_to_str(self.reader, self.token);
self.check_restricted_keywords_(w);
}
_ { }
_ => ()
}
}
@ -210,9 +210,11 @@ impl parser_common of parser_common for parser {
while self.token != token::GT
&& self.token != token::BINOP(token::SHR) {
alt sep {
some(t) { if first { first = false; }
else { self.expect(t); } }
_ { }
some(t) => {
if first { first = false; }
else { self.expect(t); }
}
_ => ()
}
vec::push(v, f(self));
}
@ -252,9 +254,11 @@ impl parser_common of parser_common for parser {
let mut v: ~[T] = ~[];
while self.token != ket {
alt sep.sep {
some(t) { if first { first = false; }
else { self.expect(t); } }
_ { }
some(t) => {
if first { first = false; }
else { self.expect(t); }
}
_ => ()
}
if sep.trailing_sep_allowed && self.token == ket { break; }
vec::push(v, f(self));

View file

@ -48,8 +48,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
fn companion_file(+prefix: ~str, suffix: option<~str>) -> ~str {
return alt suffix {
option::some(s) { path::connect(prefix, s) }
option::none { prefix }
option::some(s) => path::connect(prefix, s),
option::none => prefix
} + ~".rs";
}
@ -57,8 +57,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
// Crude, but there's no lib function for this and I'm not
// up to writing it just now
alt io::file_reader(path) {
result::ok(_) { true }
result::err(_) { false }
result::ok(_) => true,
result::err(_) => false
}
}
@ -80,10 +80,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @~str {
alt ::attr::first_attr_value_str_by_name(attrs, ~"path") {
some(d) {
return d;
}
none { return id; }
some(d) => return d,
none => return id
}
}
@ -91,7 +89,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
&view_items: ~[@ast::view_item],
&items: ~[@ast::item]) {
alt cdir.node {
ast::cdir_src_mod(id, attrs) {
ast::cdir_src_mod(id, attrs) => {
let file_path = cdir_path_opt(@(*id + ~".rs"), attrs);
let full_path =
if path::path_is_absolute(*file_path) {
@ -112,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
vec::push(items, i);
}
ast::cdir_dir_mod(id, cdirs, attrs) {
ast::cdir_dir_mod(id, cdirs, attrs) => {
let path = cdir_path_opt(id, attrs);
let full_path =
if path::path_is_absolute(*path) {
@ -130,8 +128,8 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
cx.sess.next_id += 1;
vec::push(items, i);
}
ast::cdir_view_item(vi) { vec::push(view_items, vi); }
ast::cdir_syntax(pth) { }
ast::cdir_view_item(vi) => vec::push(view_items, vi),
ast::cdir_syntax(pth) => ()
}
}
//

View file

@ -207,7 +207,7 @@ fn consume_any_line_comment(rdr: string_reader)
-> option<{tok: token::token, sp: span}> {
if rdr.curr == '/' {
alt nextch(rdr) {
'/' {
'/' => {
bump(rdr);
bump(rdr);
// line comments starting with "///" or "//!" are doc-comments
@ -228,8 +228,8 @@ fn consume_any_line_comment(rdr: string_reader)
return consume_whitespace_and_comments(rdr);
}
}
'*' { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
_ {}
'*' => { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
_ => ()
}
} else if rdr.curr == '#' {
if nextch(rdr) == '!' {
@ -314,11 +314,11 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
let c = rdr.curr;
if c == '_' { bump(rdr); again; }
alt char::to_digit(c, radix) {
some(d) {
some(d) => {
str::push_char(rslt, c);
bump(rdr);
}
_ { return rslt; }
_ => return rslt
}
};
}
@ -372,8 +372,8 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
}
let parsed = option::get(u64::from_str_radix(num_str, base as u64));
alt tp {
either::left(t) { return token::LIT_INT(parsed as i64, t); }
either::right(t) { return token::LIT_UINT(parsed, t); }
either::left(t) => return token::LIT_INT(parsed as i64, t),
either::right(t) => return token::LIT_UINT(parsed, t)
}
}
let mut is_float = false;
@ -384,11 +384,11 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
num_str += ~"." + dec_part;
}
alt scan_exponent(rdr) {
some(s) {
some(s) => {
is_float = true;
num_str += s;
}
none {}
none => ()
}
if rdr.curr == 'f' {
bump(rdr);
@ -479,9 +479,9 @@ fn next_token_inner(rdr: string_reader) -> token::token {
// One-byte tokens.
';' { bump(rdr); return token::SEMI; }
',' { bump(rdr); return token::COMMA; }
'.' {
';' => { bump(rdr); return token::SEMI; }
',' => { bump(rdr); return token::COMMA; }
'.' => {
bump(rdr);
if rdr.curr == '.' && nextch(rdr) != '.' {
bump(rdr);
@ -494,16 +494,16 @@ fn next_token_inner(rdr: string_reader) -> token::token {
}
return token::DOT;
}
'(' { bump(rdr); return token::LPAREN; }
')' { bump(rdr); return token::RPAREN; }
'{' { bump(rdr); return token::LBRACE; }
'}' { bump(rdr); return token::RBRACE; }
'[' { bump(rdr); return token::LBRACKET; }
']' { bump(rdr); return token::RBRACKET; }
'@' { bump(rdr); return token::AT; }
'#' { bump(rdr); return token::POUND; }
'~' { bump(rdr); return token::TILDE; }
':' {
'(' => { bump(rdr); return token::LPAREN; }
')' => { bump(rdr); return token::RPAREN; }
'{' => { bump(rdr); return token::LBRACE; }
'}' => { bump(rdr); return token::RBRACE; }
'[' => { bump(rdr); return token::LBRACKET; }
']' => { bump(rdr); return token::RBRACKET; }
'@' => { bump(rdr); return token::AT; }
'#' => { bump(rdr); return token::POUND; }
'~' => { bump(rdr); return token::TILDE; }
':' => {
bump(rdr);
if rdr.curr == ':' {
bump(rdr);
@ -511,14 +511,14 @@ fn next_token_inner(rdr: string_reader) -> token::token {
} else { return token::COLON; }
}
'$' { bump(rdr); return token::DOLLAR; }
'$' => { bump(rdr); return token::DOLLAR; }
// Multi-byte tokens.
'=' {
'=' => {
bump(rdr);
if rdr.curr == '=' {
bump(rdr);
@ -530,37 +530,37 @@ fn next_token_inner(rdr: string_reader) -> token::token {
return token::EQ;
}
}
'!' {
'!' => {
bump(rdr);
if rdr.curr == '=' {
bump(rdr);
return token::NE;
} else { return token::NOT; }
}
'<' {
'<' => {
bump(rdr);
alt rdr.curr {
'=' { bump(rdr); return token::LE; }
'<' { return binop(rdr, token::SHL); }
'-' {
'=' => { bump(rdr); return token::LE; }
'<' => { return binop(rdr, token::SHL); }
'-' => {
bump(rdr);
alt rdr.curr {
'>' { bump(rdr); return token::DARROW; }
_ { return token::LARROW; }
'>' => { bump(rdr); return token::DARROW; }
_ => { return token::LARROW; }
}
}
_ { return token::LT; }
_ => { return token::LT; }
}
}
'>' {
'>' => {
bump(rdr);
alt rdr.curr {
'=' { bump(rdr); return token::GE; }
'>' { return binop(rdr, token::SHR); }
_ { return token::GT; }
'=' => { bump(rdr); return token::GE; }
'>' => { return binop(rdr, token::SHR); }
_ => { return token::GT; }
}
}
'\'' {
'\'' => {
bump(rdr);
let mut c2 = rdr.curr;
bump(rdr);
@ -568,16 +568,16 @@ fn next_token_inner(rdr: string_reader) -> token::token {
let escaped = rdr.curr;
bump(rdr);
alt escaped {
'n' { c2 = '\n'; }
'r' { c2 = '\r'; }
't' { c2 = '\t'; }
'\\' { c2 = '\\'; }
'\'' { c2 = '\''; }
'"' { c2 = '"'; }
'x' { c2 = scan_numeric_escape(rdr, 2u); }
'u' { c2 = scan_numeric_escape(rdr, 4u); }
'U' { c2 = scan_numeric_escape(rdr, 8u); }
c2 {
'n' => { c2 = '\n'; }
'r' => { c2 = '\r'; }
't' => { c2 = '\t'; }
'\\' => { c2 = '\\'; }
'\'' => { c2 = '\''; }
'"' => { c2 = '"'; }
'x' => { c2 = scan_numeric_escape(rdr, 2u); }
'u' => { c2 = scan_numeric_escape(rdr, 4u); }
'U' => { c2 = scan_numeric_escape(rdr, 8u); }
c2 => {
rdr.fatal(fmt!{"unknown character escape: %d", c2 as int});
}
}
@ -588,7 +588,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
bump(rdr); // advance curr past token
return token::LIT_INT(c2 as i64, ast::ty_char);
}
'"' {
'"' => {
let n = rdr.chpos;
bump(rdr);
while rdr.curr != '"' {
@ -600,63 +600,63 @@ fn next_token_inner(rdr: string_reader) -> token::token {
let ch = rdr.curr;
bump(rdr);
alt ch {
'\\' {
'\\' => {
let escaped = rdr.curr;
bump(rdr);
alt escaped {
'n' { str::push_char(accum_str, '\n'); }
'r' { str::push_char(accum_str, '\r'); }
't' { str::push_char(accum_str, '\t'); }
'\\' { str::push_char(accum_str, '\\'); }
'\'' { str::push_char(accum_str, '\''); }
'"' { str::push_char(accum_str, '"'); }
'\n' { consume_whitespace(rdr); }
'x' {
'n' => str::push_char(accum_str, '\n'),
'r' => str::push_char(accum_str, '\r'),
't' => str::push_char(accum_str, '\t'),
'\\' => str::push_char(accum_str, '\\'),
'\'' => str::push_char(accum_str, '\''),
'"' => str::push_char(accum_str, '"'),
'\n' => consume_whitespace(rdr),
'x' => {
str::push_char(accum_str, scan_numeric_escape(rdr, 2u));
}
'u' {
'u' => {
str::push_char(accum_str, scan_numeric_escape(rdr, 4u));
}
'U' {
'U' => {
str::push_char(accum_str, scan_numeric_escape(rdr, 8u));
}
c2 {
c2 => {
rdr.fatal(fmt!{"unknown string escape: %d", c2 as int});
}
}
}
_ { str::push_char(accum_str, ch); }
_ => str::push_char(accum_str, ch)
}
}
bump(rdr);
return token::LIT_STR((*rdr.interner).intern(@accum_str));
}
'-' {
'-' => {
if nextch(rdr) == '>' {
bump(rdr);
bump(rdr);
return token::RARROW;
} else { return binop(rdr, token::MINUS); }
}
'&' {
'&' => {
if nextch(rdr) == '&' {
bump(rdr);
bump(rdr);
return token::ANDAND;
} else { return binop(rdr, token::AND); }
}
'|' {
'|' => {
alt nextch(rdr) {
'|' { bump(rdr); bump(rdr); return token::OROR; }
_ { return binop(rdr, token::OR); }
'|' => { bump(rdr); bump(rdr); return token::OROR; }
_ => { return binop(rdr, token::OR); }
}
}
'+' { return binop(rdr, token::PLUS); }
'*' { return binop(rdr, token::STAR); }
'/' { return binop(rdr, token::SLASH); }
'^' { return binop(rdr, token::CARET); }
'%' { return binop(rdr, token::PERCENT); }
c { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
'+' => { return binop(rdr, token::PLUS); }
'*' => { return binop(rdr, token::STAR); }
'/' => { return binop(rdr, token::SLASH); }
'^' => { return binop(rdr, token::CARET); }
'%' => { return binop(rdr, token::PERCENT); }
c => { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
}
}

View file

@ -107,35 +107,35 @@ The important thing is to make sure that lookahead doesn't balk
at INTERPOLATED tokens */
macro_rules! maybe_whole_expr {
{$p:expr} => { alt copy $p.token {
INTERPOLATED(token::nt_expr(e)) {
INTERPOLATED(token::nt_expr(e)) => {
$p.bump();
return pexpr(e);
}
INTERPOLATED(token::nt_path(pt)) {
INTERPOLATED(token::nt_path(pt)) => {
$p.bump();
return $p.mk_pexpr($p.span.lo, $p.span.lo,
expr_path(pt));
}
_ {}
_ => ()
}}
}
macro_rules! maybe_whole {
{$p:expr, $constructor:ident} => { alt copy $p.token {
INTERPOLATED(token::$constructor(x)) { $p.bump(); return x; }
_ {}
INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; }
_ => ()
}} ;
{deref $p:expr, $constructor:ident} => { alt copy $p.token {
INTERPOLATED(token::$constructor(x)) { $p.bump(); return *x; }
_ {}
INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; }
_ => ()
}} ;
{some $p:expr, $constructor:ident} => { alt copy $p.token {
INTERPOLATED(token::$constructor(x)) { $p.bump(); return some(x); }
_ {}
INTERPOLATED(token::$constructor(x)) => { $p.bump(); return some(x); }
_ => ()
}} ;
{pair_empty $p:expr, $constructor:ident} => { alt copy $p.token {
INTERPOLATED(token::$constructor(x)) { $p.bump(); return (~[], x); }
_ {}
INTERPOLATED(token::$constructor(x)) => { $p.bump(); return (~[], x); }
_ => ()
}}
}
@ -284,7 +284,7 @@ class parser {
`%s`",
token_to_str(p.reader, p.token)};
alt p.token {
token::SEMI {
token::SEMI => {
p.bump();
debug!{"parse_trait_methods(): parsing required method"};
// NB: at the moment, visibility annotations on required
@ -294,7 +294,7 @@ class parser {
self_ty: self_ty,
id: p.get_id(), span: mk_sp(lo, hi)})
}
token::LBRACE {
token::LBRACE => {
debug!{"parse_trait_methods(): parsing provided method"};
let (inner_attrs, body) =
p.parse_inner_attrs_and_block(true);
@ -311,7 +311,7 @@ class parser {
vis: vis})
}
_ { p.fatal(~"expected `;` or `}` but found `" +
_ => { p.fatal(~"expected `;` or `}` but found `" +
token_to_str(p.reader, p.token) + ~"`");
}
}
@ -356,8 +356,8 @@ class parser {
fn region_from_name(s: option<@~str>) -> @region {
let r = alt s {
some (string) { re_named(string) }
none { re_anon }
some (string) => re_named(string),
none => re_anon
};
@{id: self.get_id(), node: r}
@ -368,12 +368,12 @@ class parser {
self.expect(token::BINOP(token::AND));
alt copy self.token {
token::IDENT(sid, _) {
token::IDENT(sid, _) => {
self.bump();
let n = self.get_str(sid);
self.region_from_name(some(n))
}
_ {
_ => {
self.region_from_name(none)
}
}
@ -402,12 +402,12 @@ class parser {
let lo = self.span.lo;
alt self.maybe_parse_dollar_mac() {
some(e) {
some(e) => {
return @{id: self.get_id(),
node: ty_mac(spanned(lo, self.span.hi, e)),
span: mk_sp(lo, self.span.hi)};
}
none {}
none => ()
}
let t = if self.token == token::LPAREN {
@ -472,8 +472,8 @@ class parser {
return @{id: self.get_id(),
node: alt self.maybe_parse_fixed_vstore() {
// Consider a fixed vstore suffix (/N or /_)
none { t }
some(v) {
none => t,
some(v) => {
ty_fixed_length(@{id: self.get_id(), node:t, span: sp}, v)
} },
span: sp}
@ -542,27 +542,27 @@ class parser {
fn maybe_parse_dollar_mac() -> option<mac_> {
alt copy self.token {
token::DOLLAR {
token::DOLLAR => {
let lo = self.span.lo;
self.bump();
alt copy self.token {
token::LIT_INT_UNSUFFIXED(num) {
token::LIT_INT_UNSUFFIXED(num) => {
self.bump();
some(mac_var(num as uint))
}
token::LPAREN {
token::LPAREN => {
self.bump();
let e = self.parse_expr();
self.expect(token::RPAREN);
let hi = self.last_span.hi;
some(mac_aq(mk_sp(lo,hi), e))
}
_ {
_ => {
self.fatal(~"expected `(` or unsuffixed integer literal");
}
}
}
_ {none}
_ => none
}
}
@ -570,15 +570,13 @@ class parser {
if self.token == token::BINOP(token::SLASH) {
self.bump();
alt copy self.token {
token::UNDERSCORE {
token::UNDERSCORE => {
self.bump(); some(none)
}
token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 {
token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => {
self.bump(); some(some(i as uint))
}
_ {
none
}
_ => none
}
} else {
none
@ -587,13 +585,13 @@ class parser {
fn lit_from_token(tok: token::token) -> lit_ {
alt tok {
token::LIT_INT(i, it) { lit_int(i, it) }
token::LIT_UINT(u, ut) { lit_uint(u, ut) }
token::LIT_INT_UNSUFFIXED(i) { lit_int_unsuffixed(i) }
token::LIT_FLOAT(s, ft) { lit_float(self.get_str(s), ft) }
token::LIT_STR(s) { lit_str(self.get_str(s)) }
token::LPAREN { self.expect(token::RPAREN); lit_nil }
_ { self.unexpected_last(tok); }
token::LIT_INT(i, it) => lit_int(i, it),
token::LIT_UINT(u, ut) => lit_uint(u, ut),
token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i),
token::LIT_FLOAT(s, ft) => lit_float(self.get_str(s), ft),
token::LIT_STR(s) => lit_str(self.get_str(s)),
token::LPAREN => { self.expect(token::RPAREN); lit_nil }
_ => self.unexpected_last(tok)
}
}
@ -735,8 +733,8 @@ class parser {
fn to_expr(e: pexpr) -> @expr {
alt e.node {
expr_tup(es) if vec::len(es) == 1u { es[0u] }
_ { *e }
expr_tup(es) if vec::len(es) == 1u => es[0u],
_ => *e
}
}
@ -748,8 +746,8 @@ class parser {
let mut ex: expr_;
alt self.maybe_parse_dollar_mac() {
some(x) {return pexpr(self.mk_mac_expr(lo, self.span.hi, x));}
_ {}
some(x) => return pexpr(self.mk_mac_expr(lo, self.span.hi, x)),
_ => ()
}
if self.token == token::LPAREN {
@ -800,8 +798,8 @@ class parser {
} else if self.eat_keyword(~"fn") {
let proto = self.parse_fn_ty_proto();
alt proto {
proto_bare { self.fatal(~"fn expr are deprecated, use fn@"); }
_ { /* fallthrough */ }
proto_bare => self.fatal(~"fn expr are deprecated, use fn@"),
_ => { /* fallthrough */ }
}
return pexpr(self.parse_fn_expr(proto));
} else if self.eat_keyword(~"unchecked") {
@ -895,13 +893,13 @@ class parser {
if self.token == token::NOT {
self.bump();
let tts = alt self.token {
token::LPAREN | token::LBRACE | token::LBRACKET {
token::LPAREN | token::LBRACE | token::LBRACKET => {
let ket = token::flip_delimiter(self.token);
self.parse_unspanned_seq(copy self.token, ket,
seq_sep_none(),
|p| p.parse_token_tree())
}
_ { self.fatal(~"expected open delimiter"); }
_ => self.fatal(~"expected open delimiter")
};
let hi = self.span.hi;
@ -942,16 +940,14 @@ class parser {
// only.
alt ex {
expr_lit(@{node: lit_str(_), span: _}) |
expr_vec(_, _) {
alt self.maybe_parse_fixed_vstore() {
none { }
some(v) {
expr_vec(_, _) => alt self.maybe_parse_fixed_vstore() {
none => (),
some(v) => {
hi = self.span.hi;
ex = expr_vstore(self.mk_expr(lo, hi, ex), vstore_fixed(v));
}
}
}
_ { }
_ => ()
}
return self.mk_pexpr(lo, hi, ex);
@ -971,8 +967,8 @@ class parser {
fn parse_syntax_ext_naked(lo: uint) -> @expr {
alt self.token {
token::IDENT(_, _) {}
_ { self.fatal(~"expected a syntax expander name"); }
token::IDENT(_, _) => (),
_ => self.fatal(~"expected a syntax expander name")
}
let pth = self.parse_path_without_tps();
//temporary for a backwards-compatible cycle:
@ -998,10 +994,10 @@ class parser {
let mut depth = 1u;
while (depth > 0u) {
alt (self.token) {
token::LBRACE {depth += 1u;}
token::RBRACE {depth -= 1u;}
token::EOF {self.fatal(~"unexpected EOF in macro body");}
_ {}
token::LBRACE => depth += 1u,
token::RBRACE => depth -= 1u,
token::EOF => self.fatal(~"unexpected EOF in macro body"),
_ => ()
}
self.bump();
}
@ -1028,7 +1024,7 @@ class parser {
// expr.f
if self.eat(token::DOT) {
alt copy self.token {
token::IDENT(i, _) {
token::IDENT(i, _) => {
hi = self.span.hi;
self.bump();
let tys = if self.eat(token::MOD_SEP) {
@ -1040,14 +1036,14 @@ class parser {
self.get_str(i),
tys));
}
_ { self.unexpected(); }
_ => self.unexpected()
}
again;
}
if self.expr_is_complete(e) { break; }
alt copy self.token {
// expr(...)
token::LPAREN if self.permits_call() {
token::LPAREN if self.permits_call() => {
let es = self.parse_unspanned_seq(
token::LPAREN, token::RPAREN,
seq_sep_trailing_disallowed(token::COMMA),
@ -1059,7 +1055,7 @@ class parser {
}
// expr[...]
token::LBRACKET {
token::LBRACKET => {
self.bump();
let ix = self.parse_expr();
hi = ix.span.hi;
@ -1067,7 +1063,7 @@ class parser {
e = self.mk_pexpr(lo, hi, expr_index(self.to_expr(e), ix));
}
_ { return e; }
_ => return e
}
}
return e;
@ -1099,15 +1095,15 @@ class parser {
fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
alt p.token {
token::RPAREN | token::RBRACE | token::RBRACKET
if !delim_ok {
if !delim_ok => {
p.fatal(~"incorrect close delimiter: `"
+ token_to_str(p.reader, p.token) + ~"`");
}
token::EOF {
token::EOF => {
p.fatal(~"file ended in the middle of a macro invocation");
}
/* we ought to allow different depths of unquotation */
token::DOLLAR if p.quote_depth > 0u {
token::DOLLAR if p.quote_depth > 0u => {
p.bump();
let sp = p.span;
@ -1121,7 +1117,7 @@ class parser {
return tt_nonterminal(sp, p.parse_ident());
}
}
_ { /* ok */ }
_ => { /* ok */ }
}
let res = tt_tok(p.span, p.token);
p.bump();
@ -1129,7 +1125,7 @@ class parser {
}
return alt self.token {
token::LPAREN | token::LBRACE | token::LBRACKET {
token::LPAREN | token::LBRACE | token::LBRACKET => {
let ket = token::flip_delimiter(self.token);
tt_delim(vec::append(
~[parse_tt_tok(self, true)],
@ -1139,7 +1135,7 @@ class parser {
|p| p.parse_token_tree()),
~[parse_tt_tok(self, true)])))
}
_ { parse_tt_tok(self, false) }
_ => parse_tt_tok(self, false)
};
}
@ -1149,11 +1145,11 @@ class parser {
maybe_whole!{self, nt_matchers};
let name_idx = @mut 0u;
return alt self.token {
token::LBRACE | token::LPAREN | token::LBRACKET {
token::LBRACE | token::LPAREN | token::LBRACKET => {
self.parse_matcher_subseq(name_idx, copy self.token,
token::flip_delimiter(self.token))
}
_ { self.fatal(~"expected open delimiter"); }
_ => self.fatal(~"expected open delimiter")
}
}
@ -1217,29 +1213,29 @@ class parser {
let mut ex;
alt copy self.token {
token::NOT {
token::NOT => {
self.bump();
let e = self.to_expr(self.parse_prefix_expr());
hi = e.span.hi;
self.get_id(); // see ast_util::op_expr_callee_id
ex = expr_unary(not, e);
}
token::BINOP(b) {
token::BINOP(b) => {
alt b {
token::MINUS {
token::MINUS => {
self.bump();
let e = self.to_expr(self.parse_prefix_expr());
hi = e.span.hi;
self.get_id(); // see ast_util::op_expr_callee_id
ex = expr_unary(neg, e);
}
token::STAR {
token::STAR => {
self.bump();
let e = self.to_expr(self.parse_prefix_expr());
hi = e.span.hi;
ex = expr_unary(deref, e);
}
token::AND {
token::AND => {
self.bump();
let m = self.parse_mutability();
let e = self.to_expr(self.parse_prefix_expr());
@ -1247,16 +1243,16 @@ class parser {
// HACK: turn &[...] into a &-evec
ex = alt e.node {
expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
if m == m_imm {
if m == m_imm => {
expr_vstore(e, vstore_slice(self.region_from_name(none)))
}
_ { expr_addr_of(m, e) }
_ => expr_addr_of(m, e)
};
}
_ { return self.parse_dot_or_call_expr(); }
_ => return self.parse_dot_or_call_expr()
}
}
token::AT {
token::AT => {
self.bump();
let m = self.parse_mutability();
let e = self.to_expr(self.parse_prefix_expr());
@ -1264,11 +1260,11 @@ class parser {
// HACK: turn @[...] into a @-evec
ex = alt e.node {
expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
if m == m_imm { expr_vstore(e, vstore_box) }
_ { expr_unary(box(m), e) }
if m == m_imm => expr_vstore(e, vstore_box),
_ => expr_unary(box(m), e)
};
}
token::TILDE {
token::TILDE => {
self.bump();
let m = self.parse_mutability();
let e = self.to_expr(self.parse_prefix_expr());
@ -1276,11 +1272,11 @@ class parser {
// HACK: turn ~[...] into a ~-evec
ex = alt e.node {
expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
if m == m_imm { expr_vstore(e, vstore_uniq) }
_ { expr_unary(uniq(m), e) }
if m == m_imm => expr_vstore(e, vstore_uniq),
_ => expr_unary(uniq(m), e)
};
}
_ { return self.parse_dot_or_call_expr(); }
_ => return self.parse_dot_or_call_expr()
}
return self.mk_pexpr(lo, hi, ex);
}
@ -1306,7 +1302,7 @@ class parser {
}
let cur_opt = token_to_binop(peeked);
alt cur_opt {
some(cur_op) {
some(cur_op) => {
let cur_prec = operator_prec(cur_op);
if cur_prec > min_prec {
self.bump();
@ -1318,7 +1314,7 @@ class parser {
return self.parse_more_binops(bin, min_prec);
}
}
_ {}
_ => ()
}
if as_prec > min_prec && self.eat_keyword(~"as") {
let rhs = self.parse_ty(true);
@ -1333,42 +1329,42 @@ class parser {
let lo = self.span.lo;
let lhs = self.parse_binops();
alt copy self.token {
token::EQ {
token::EQ => {
self.bump();
let rhs = self.parse_expr();
return self.mk_expr(lo, rhs.span.hi, expr_assign(lhs, rhs));
}
token::BINOPEQ(op) {
token::BINOPEQ(op) => {
self.bump();
let rhs = self.parse_expr();
let mut aop;
alt op {
token::PLUS { aop = add; }
token::MINUS { aop = subtract; }
token::STAR { aop = mul; }
token::SLASH { aop = div; }
token::PERCENT { aop = rem; }
token::CARET { aop = bitxor; }
token::AND { aop = bitand; }
token::OR { aop = bitor; }
token::SHL { aop = shl; }
token::SHR { aop = shr; }
token::PLUS => aop = add,
token::MINUS => aop = subtract,
token::STAR => aop = mul,
token::SLASH => aop = div,
token::PERCENT => aop = rem,
token::CARET => aop = bitxor,
token::AND => aop = bitand,
token::OR => aop = bitor,
token::SHL => aop = shl,
token::SHR => aop = shr
}
self.get_id(); // see ast_util::op_expr_callee_id
return self.mk_expr(lo, rhs.span.hi,
expr_assign_op(aop, lhs, rhs));
}
token::LARROW {
token::LARROW => {
self.bump();
let rhs = self.parse_expr();
return self.mk_expr(lo, rhs.span.hi, expr_move(lhs, rhs));
}
token::DARROW {
token::DARROW => {
self.bump();
let rhs = self.parse_expr();
return self.mk_expr(lo, rhs.span.hi, expr_swap(lhs, rhs));
}
_ {/* fall through */ }
_ => {/* fall through */ }
}
return lhs;
}
@ -1407,10 +1403,10 @@ class parser {
self.parse_lambda_expr_(
|| {
alt self.token {
token::BINOP(token::OR) | token::OROR {
token::BINOP(token::OR) | token::OROR => {
self.parse_fn_block_decl()
}
_ {
_ => {
// No argument list - `do foo {`
({
{
@ -1476,7 +1472,7 @@ class parser {
// them as the lambda arguments
let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP);
alt e.node {
expr_call(f, args, false) {
expr_call(f, args, false) => {
let block = self.parse_lambda_block_expr();
let last_arg = self.mk_expr(block.span.lo, block.span.hi,
ctor(block));
@ -1484,14 +1480,14 @@ class parser {
@{node: expr_call(f, args, true)
with *e}
}
expr_path(*) | expr_field(*) | expr_call(*) {
expr_path(*) | expr_field(*) | expr_call(*) => {
let block = self.parse_lambda_block_expr();
let last_arg = self.mk_expr(block.span.lo, block.span.hi,
ctor(block));
self.mk_expr(lo.lo, last_arg.span.hi,
expr_call(e, ~[last_arg], true))
}
_ {
_ => {
// There may be other types of expressions that can
// represent the callee in `for` and `do` expressions
// but they aren't represented by tests
@ -1607,11 +1603,11 @@ class parser {
fn parse_initializer() -> option<initializer> {
alt self.token {
token::EQ {
token::EQ => {
self.bump();
return some({op: init_assign, expr: self.parse_expr()});
}
token::LARROW {
token::LARROW => {
self.bump();
return some({op: init_move, expr: self.parse_expr()});
}
@ -1622,7 +1618,7 @@ class parser {
// return some(rec(op = init_recv,
// expr = self.parse_expr()));
// }
_ {
_ => {
return none;
}
}
@ -1644,39 +1640,43 @@ class parser {
let mut hi = self.span.hi;
let mut pat;
alt self.token {
token::UNDERSCORE { self.bump(); pat = pat_wild; }
token::AT {
token::UNDERSCORE => { self.bump(); pat = pat_wild; }
token::AT => {
self.bump();
let sub = self.parse_pat(refutable);
hi = sub.span.hi;
// HACK: parse @"..." as a literal of a vstore @str
pat = alt sub.node {
pat_lit(e@@{node: expr_lit(@{node: lit_str(_), span: _}), _}) {
pat_lit(e@@{
node: expr_lit(@{node: lit_str(_), span: _}), _
}) => {
let vst = @{id: self.get_id(), callee_id: self.get_id(),
node: expr_vstore(e, vstore_box),
span: mk_sp(lo, hi)};
pat_lit(vst)
}
_ { pat_box(sub) }
_ => pat_box(sub)
};
}
token::TILDE {
token::TILDE => {
self.bump();
let sub = self.parse_pat(refutable);
hi = sub.span.hi;
// HACK: parse ~"..." as a literal of a vstore ~str
pat = alt sub.node {
pat_lit(e@@{node: expr_lit(@{node: lit_str(_), span: _}), _}) {
pat_lit(e@@{
node: expr_lit(@{node: lit_str(_), span: _}), _
}) => {
let vst = @{id: self.get_id(), callee_id: self.get_id(),
node: expr_vstore(e, vstore_uniq),
span: mk_sp(lo, hi)};
pat_lit(vst)
}
_ { pat_uniq(sub) }
_ => pat_uniq(sub)
};
}
token::LBRACE {
token::LBRACE => {
self.bump();
let mut fields = ~[];
let mut etc = false;
@ -1722,7 +1722,7 @@ class parser {
self.bump();
pat = pat_rec(fields, etc);
}
token::LPAREN {
token::LPAREN => {
self.bump();
if self.token == token::RPAREN {
hi = self.span.hi;
@ -1742,7 +1742,7 @@ class parser {
pat = pat_tup(fields);
}
}
tok {
tok => {
if !is_ident(tok) ||
self.is_keyword(~"true") || self.is_keyword(~"false") {
let val = self.parse_expr_res(RESTRICT_NO_BAR_OP);
@ -1789,24 +1789,22 @@ class parser {
let mut args: ~[@pat] = ~[];
let mut star_pat = false;
alt self.token {
token::LPAREN {
alt self.look_ahead(1u) {
token::BINOP(token::STAR) {
token::LPAREN => alt self.look_ahead(1u) {
token::BINOP(token::STAR) => {
// This is a "top constructor only" pat
self.bump(); self.bump();
star_pat = true;
self.expect(token::RPAREN);
self.bump(); self.bump();
star_pat = true;
self.expect(token::RPAREN);
}
_ {
_ => {
args = self.parse_unspanned_seq(
token::LPAREN, token::RPAREN,
seq_sep_trailing_disallowed(token::COMMA),
|p| p.parse_pat(refutable));
hi = self.span.hi;
hi = self.span.hi;
}
}
}
_ { }
_ => ()
}
// at this point, we're not sure whether it's a enum or a
// bind
@ -1887,9 +1885,9 @@ class parser {
} else {
let mut item_attrs;
alt self.parse_outer_attrs_or_ext(first_item_attrs) {
none { item_attrs = ~[]; }
some(left(attrs)) { item_attrs = attrs; }
some(right(ext)) {
none => item_attrs = ~[],
some(left(attrs)) => item_attrs = attrs,
some(right(ext)) => {
return @spanned(lo, ext.span.hi,
stmt_expr(ext, self.get_id()));
}
@ -1898,12 +1896,12 @@ class parser {
let item_attrs = vec::append(first_item_attrs, item_attrs);
alt self.parse_item(item_attrs) {
some(i) {
some(i) => {
let mut hi = i.span.hi;
let decl = @spanned(lo, hi, decl_item(i));
return @spanned(lo, hi, stmt_decl(decl, self.get_id()));
}
none() { /* fallthrough */ }
none() => { /* fallthrough */ }
}
check_expected_item(self, item_attrs);
@ -1990,24 +1988,24 @@ class parser {
while self.token != token::RBRACE {
alt self.token {
token::SEMI {
token::SEMI => {
self.bump(); // empty
}
_ {
_ => {
let stmt = self.parse_stmt(initial_attrs);
initial_attrs = ~[];
alt stmt.node {
stmt_expr(e, stmt_id) { // Expression without semicolon:
stmt_expr(e, stmt_id) => { // Expression without semicolon:
alt self.token {
token::SEMI {
token::SEMI => {
self.bump();
push(stmts,
@{node: stmt_semi(e, stmt_id) with *stmt});
}
token::RBRACE {
token::RBRACE => {
expr = some(e);
}
t {
t => {
if classify::stmt_ends_with_semi(*stmt) {
self.fatal(~"expected `;` or `}` after \
expression but found `"
@ -2018,7 +2016,7 @@ class parser {
}
}
_ { // All other kinds of statements:
_ => { // All other kinds of statements:
vec::push(stmts, stmt);
if classify::stmt_ends_with_semi(*stmt) {
@ -2083,12 +2081,8 @@ class parser {
fn is_self_ident() -> bool {
alt self.token {
token::IDENT(sid, false) if ~"self" == *self.get_str(sid) {
true
}
_ => {
false
}
token::IDENT(sid, false) if ~"self" == *self.get_str(sid) => true,
_ => false
}
}
@ -2266,14 +2260,14 @@ class parser {
fn parse_method_name() -> ident {
alt copy self.token {
token::BINOP(op) { self.bump(); @token::binop_to_str(op) }
token::NOT { self.bump(); @~"!" }
token::LBRACKET {
token::BINOP(op) => { self.bump(); @token::binop_to_str(op) }
token::NOT => { self.bump(); @~"!" }
token::LBRACKET => {
self.bump();
self.expect(token::RBRACKET);
@~"[]"
}
_ {
_ => {
let id = self.parse_value_ident();
if id == @~"unary" && self.eat(token::BINOP(token::MINUS)) {
@~"unary-"
@ -2388,8 +2382,8 @@ class parser {
traits = ~[];
};
ident = alt ident_old {
some(name) { name }
none { self.expect_keyword(~"of"); fail; }
some(name) => name,
none => { self.expect_keyword(~"of"); fail; }
};
self.expect_keyword(~"for");
ty = self.parse_ty(false);
@ -2446,13 +2440,13 @@ class parser {
let mut the_dtor : option<(blk, ~[attribute], codemap::span)> = none;
while self.token != token::RBRACE {
alt self.parse_class_item(class_path) {
ctor_decl(a_fn_decl, attrs, blk, s) {
ctor_decl(a_fn_decl, attrs, blk, s) => {
the_ctor = some((a_fn_decl, attrs, blk, s));
}
dtor_decl(blk, attrs, s) {
dtor_decl(blk, attrs, s) => {
the_dtor = some((blk, attrs, s));
}
members(mms) { ms = vec::append(ms, mms); }
members(mms) => { ms = vec::append(ms, mms); }
}
}
let actual_dtor = do option::map(the_dtor) |dtor| {
@ -2464,7 +2458,7 @@ class parser {
span: d_s}};
self.bump();
alt the_ctor {
some((ct_d, ct_attrs, ct_b, ct_s)) {
some((ct_d, ct_attrs, ct_b, ct_s)) => {
(class_name,
item_class(ty_params, traits, ms, some({
node: {id: ctor_id,
@ -2478,7 +2472,7 @@ class parser {
/*
Is it strange for the parser to check this?
*/
none {
none => {
(class_name,
item_class(ty_params, traits, ms, none, actual_dtor),
none)
@ -2488,8 +2482,8 @@ class parser {
fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
alt tok {
token::POUND | token::DOC_COMMENT(_) { true }
_ { false }
token::POUND | token::DOC_COMMENT(_) => true,
_ => false
}
}
@ -2583,8 +2577,8 @@ class parser {
}
debug!{"parse_mod_items: parse_item(attrs=%?)", attrs};
alt self.parse_item(attrs) {
some(i) { vec::push(items, i); }
_ {
some(i) => vec::push(items, i),
_ => {
self.fatal(~"expected item but found `" +
token_to_str(self.reader, self.token) + ~"`");
}
@ -2765,19 +2759,19 @@ class parser {
fn parse_fn_ty_proto() -> proto {
alt self.token {
token::AT {
token::AT => {
self.bump();
proto_box
}
token::TILDE {
token::TILDE => {
self.bump();
proto_uniq
}
token::BINOP(token::AND) {
token::BINOP(token::AND) => {
self.bump();
proto_block
}
_ {
_ => {
proto_block
}
}
@ -2785,12 +2779,8 @@ class parser {
fn fn_expr_lookahead(tok: token::token) -> bool {
alt tok {
token::LPAREN | token::AT | token::TILDE | token::BINOP(_) {
true
}
_ {
false
}
token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
_ => false
}
}
@ -2851,13 +2841,13 @@ class parser {
self.expect(token::NOT);
let id = self.parse_ident();
let tts = alt self.token {
token::LPAREN | token::LBRACE | token::LBRACKET {
token::LPAREN | token::LBRACE | token::LBRACKET => {
let ket = token::flip_delimiter(self.token);
self.parse_unspanned_seq(copy self.token, ket,
seq_sep_none(),
|p| p.parse_token_tree())
}
_ { self.fatal(~"expected open delimiter"); }
_ => self.fatal(~"expected open delimiter")
};
let m = ast::mac_invoc_tt(pth, tts);
let m: ast::mac = {node: m,
@ -2868,8 +2858,8 @@ class parser {
} else { return none; };
some(self.mk_item(lo, self.last_span.hi, ident, item_, visibility,
alt extra_attrs {
some(as) { vec::append(attrs, as) }
none { attrs }
some(as) => vec::append(attrs, as),
none => attrs
}))
}
@ -2885,7 +2875,7 @@ class parser {
let mut path = ~[first_ident];
debug!{"parsed view_path: %s", *first_ident};
alt self.token {
token::EQ {
token::EQ => {
// x = foo::bar
self.bump();
path = ~[self.parse_ident()];
@ -2900,20 +2890,20 @@ class parser {
view_path_simple(first_ident, path, self.get_id()));
}
token::MOD_SEP {
token::MOD_SEP => {
// foo::bar or foo::{a,b,c} or foo::*
while self.token == token::MOD_SEP {
self.bump();
alt copy self.token {
token::IDENT(i, _) {
token::IDENT(i, _) => {
self.bump();
vec::push(path, self.get_str(i));
}
// foo::bar::{a,b,c}
token::LBRACE {
token::LBRACE => {
let idents = self.parse_unspanned_seq(
token::LBRACE, token::RBRACE,
seq_sep_trailing_allowed(token::COMMA),
@ -2926,7 +2916,7 @@ class parser {
}
// foo::bar::*
token::BINOP(token::STAR) {
token::BINOP(token::STAR) => {
self.bump();
let path = @{span: mk_sp(lo, self.span.hi),
global: false, idents: path,
@ -2935,11 +2925,11 @@ class parser {
view_path_glob(path, self.get_id()));
}
_ { break; }
_ => break
}
}
}
_ { }
_ => ()
}
let last = path[vec::len(path) - 1u];
let path = @{span: mk_sp(lo, self.span.hi), global: false,
@ -3009,10 +2999,8 @@ class parser {
fn parse_str() -> @~str {
alt copy self.token {
token::LIT_STR(s) { self.bump(); self.get_str(s) }
_ {
self.fatal(~"expected string literal")
}
token::LIT_STR(s) => { self.bump(); self.get_str(s) }
_ => self.fatal(~"expected string literal")
}
}
@ -3043,13 +3031,13 @@ class parser {
let id = self.parse_ident();
alt self.token {
// mod x = "foo.rs";
token::SEMI {
token::SEMI => {
let mut hi = self.span.hi;
self.bump();
return spanned(lo, hi, cdir_src_mod(id, outer_attrs));
}
// mod x = "foo_dir" { ...directives... }
token::LBRACE {
token::LBRACE => {
self.bump();
let inner_attrs = self.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
@ -3061,7 +3049,7 @@ class parser {
return spanned(lo, hi,
cdir_dir_mod(id, cdirs, mod_attrs));
}
_ { self.unexpected(); }
_ => self.unexpected()
}
} else if self.is_view_item() {
let vi = self.parse_view_item(outer_attrs);

View file

@ -21,25 +21,25 @@ const as_prec: uint = 11u;
*/
fn token_to_binop(tok: token) -> option<ast::binop> {
alt tok {
BINOP(STAR) { some(mul) }
BINOP(SLASH) { some(div) }
BINOP(PERCENT) { some(rem) }
BINOP(STAR) => some(mul),
BINOP(SLASH) => some(div),
BINOP(PERCENT) => some(rem),
// 'as' sits between here with 11
BINOP(PLUS) { some(add) }
BINOP(MINUS) { some(subtract) }
BINOP(SHL) { some(shl) }
BINOP(SHR) { some(shr) }
BINOP(AND) { some(bitand) }
BINOP(CARET) { some(bitxor) }
BINOP(OR) { some(bitor) }
LT { some(lt) }
LE { some(le) }
GE { some(ge) }
GT { some(gt) }
EQEQ { some(eq) }
NE { some(ne) }
ANDAND { some(and) }
OROR { some(or) }
_ { none }
BINOP(PLUS) => some(add),
BINOP(MINUS) => some(subtract),
BINOP(SHL) => some(shl),
BINOP(SHR) => some(shr),
BINOP(AND) => some(bitand),
BINOP(CARET) => some(bitxor),
BINOP(OR) => some(bitor),
LT => some(lt),
LE => some(le),
GE => some(ge),
GT => some(gt),
EQEQ => some(eq),
NE => some(ne),
ANDAND => some(and),
OROR => some(or),
_ => none
}
}

View file

@ -103,95 +103,100 @@ enum nonterminal {
fn binop_to_str(o: binop) -> ~str {
alt o {
PLUS { ~"+" }
MINUS { ~"-" }
STAR { ~"*" }
SLASH { ~"/" }
PERCENT { ~"%" }
CARET { ~"^" }
AND { ~"&" }
OR { ~"|" }
SHL { ~"<<" }
SHR { ~">>" }
PLUS => ~"+",
MINUS => ~"-",
STAR => ~"*",
SLASH => ~"/",
PERCENT => ~"%",
CARET => ~"^",
AND => ~"&",
OR => ~"|",
SHL => ~"<<",
SHR => ~">>"
}
}
fn to_str(in: interner<@~str>, t: token) -> ~str {
alt t {
EQ { ~"=" }
LT { ~"<" }
LE { ~"<=" }
EQEQ { ~"==" }
NE { ~"!=" }
GE { ~">=" }
GT { ~">" }
NOT { ~"!" }
TILDE { ~"~" }
OROR { ~"||" }
ANDAND { ~"&&" }
BINOP(op) { binop_to_str(op) }
BINOPEQ(op) { binop_to_str(op) + ~"=" }
EQ => ~"=",
LT => ~"<",
LE => ~"<=",
EQEQ => ~"==",
NE => ~"!=",
GE => ~">=",
GT => ~">",
NOT => ~"!",
TILDE => ~"~",
OROR => ~"||",
ANDAND => ~"&&",
BINOP(op) => binop_to_str(op),
BINOPEQ(op) => binop_to_str(op) + ~"=",
/* Structural symbols */
AT { ~"@" }
DOT { ~"." }
DOTDOT { ~".." }
ELLIPSIS { ~"..." }
COMMA { ~"," }
SEMI { ~";" }
COLON { ~":" }
MOD_SEP { ~"::" }
RARROW { ~"->" }
LARROW { ~"<-" }
DARROW { ~"<->" }
FAT_ARROW { ~"=>" }
LPAREN { ~"(" }
RPAREN { ~")" }
LBRACKET { ~"[" }
RBRACKET { ~"]" }
LBRACE { ~"{" }
RBRACE { ~"}" }
POUND { ~"#" }
DOLLAR { ~"$" }
AT => ~"@",
DOT => ~".",
DOTDOT => ~"..",
ELLIPSIS => ~"...",
COMMA => ~",",
SEMI => ~";",
COLON => ~":",
MOD_SEP => ~"::",
RARROW => ~"->",
LARROW => ~"<-",
DARROW => ~"<->",
FAT_ARROW => ~"=>",
LPAREN => ~"(",
RPAREN => ~")",
LBRACKET => ~"[",
RBRACKET => ~"]",
LBRACE => ~"{",
RBRACE => ~"}",
POUND => ~"#",
DOLLAR => ~"$",
/* Literals */
LIT_INT(c, ast::ty_char) {
LIT_INT(c, ast::ty_char) => {
~"'" + char::escape_default(c as char) + ~"'"
}
LIT_INT(i, t) {
LIT_INT(i, t) => {
int::to_str(i as int, 10u) + ast_util::int_ty_to_str(t)
}
LIT_UINT(u, t) {
LIT_UINT(u, t) => {
uint::to_str(u as uint, 10u) + ast_util::uint_ty_to_str(t)
}
LIT_INT_UNSUFFIXED(i) {
LIT_INT_UNSUFFIXED(i) => {
int::to_str(i as int, 10u)
}
LIT_FLOAT(s, t) {
LIT_FLOAT(s, t) => {
let mut body = *in.get(s);
if body.ends_with(~".") {
body = body + ~"0"; // `10.f` is not a float literal
}
body + ast_util::float_ty_to_str(t)
}
LIT_STR(s) { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
LIT_STR(s) => { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
/* Name components */
IDENT(s, _) { *in.get(s) }
IDENT(s, _) => *in.get(s),
UNDERSCORE { ~"_" }
UNDERSCORE => ~"_",
/* Other */
DOC_COMMENT(s) { *in.get(s) }
EOF { ~"<eof>" }
INTERPOLATED(nt) {
DOC_COMMENT(s) => *in.get(s),
EOF => ~"<eof>",
INTERPOLATED(nt) => {
~"an interpolated " +
alt nt {
nt_item(*) { ~"item" } nt_block(*) { ~"block" }
nt_stmt(*) { ~"statement" } nt_pat(*) { ~"pattern" }
nt_expr(*) { ~"expression" } nt_ty(*) { ~"type" }
nt_ident(*) { ~"identifier" } nt_path(*) { ~"path" }
nt_tt(*) { ~"tt" } nt_matchers(*) { ~"matcher sequence" }
nt_item(*) => ~"item",
nt_block(*) => ~"block",
nt_stmt(*) => ~"statement",
nt_pat(*) => ~"pattern",
nt_expr(*) => ~"expression",
nt_ty(*) => ~"type",
nt_ident(*) => ~"identifier",
nt_path(*) => ~"path",
nt_tt(*) => ~"tt",
nt_matchers(*) => ~"matcher sequence"
}
}
}
@ -199,44 +204,44 @@ fn to_str(in: interner<@~str>, t: token) -> ~str {
pure fn can_begin_expr(t: token) -> bool {
alt t {
LPAREN { true }
LBRACE { true }
LBRACKET { true }
IDENT(_, _) { true }
UNDERSCORE { true }
TILDE { true }
LIT_INT(_, _) { true }
LIT_UINT(_, _) { true }
LIT_INT_UNSUFFIXED(_) { true }
LIT_FLOAT(_, _) { true }
LIT_STR(_) { true }
POUND { true }
AT { true }
NOT { true }
BINOP(MINUS) { true }
BINOP(STAR) { true }
BINOP(AND) { true }
BINOP(OR) { true } // in lambda syntax
OROR { true } // in lambda syntax
MOD_SEP { true }
LPAREN => true,
LBRACE => true,
LBRACKET => true,
IDENT(_, _) => true,
UNDERSCORE => true,
TILDE => true,
LIT_INT(_, _) => true,
LIT_UINT(_, _) => true,
LIT_INT_UNSUFFIXED(_) => true,
LIT_FLOAT(_, _) => true,
LIT_STR(_) => true,
POUND => true,
AT => true,
NOT => true,
BINOP(MINUS) => true,
BINOP(STAR) => true,
BINOP(AND) => true,
BINOP(OR) => true, // in lambda syntax
OROR => true, // in lambda syntax
MOD_SEP => true,
INTERPOLATED(nt_expr(*))
| INTERPOLATED(nt_ident(*))
| INTERPOLATED(nt_block(*))
| INTERPOLATED(nt_path(*)) { true }
_ { false }
| INTERPOLATED(nt_path(*)) => true,
_ => false
}
}
/// what's the opposite delimiter?
fn flip_delimiter(&t: token::token) -> token::token {
alt t {
token::LPAREN { token::RPAREN }
token::LBRACE { token::RBRACE }
token::LBRACKET { token::RBRACKET }
token::RPAREN { token::LPAREN }
token::RBRACE { token::LBRACE }
token::RBRACKET { token::LBRACKET }
_ { fail }
token::LPAREN => token::RPAREN,
token::LBRACE => token::RBRACE,
token::LBRACKET => token::RBRACKET,
token::RPAREN => token::LPAREN,
token::RBRACE => token::LBRACE,
token::RBRACKET => token::LBRACKET,
_ => fail
}
}
@ -244,25 +249,25 @@ fn flip_delimiter(&t: token::token) -> token::token {
fn is_lit(t: token) -> bool {
alt t {
LIT_INT(_, _) { true }
LIT_UINT(_, _) { true }
LIT_INT_UNSUFFIXED(_) { true }
LIT_FLOAT(_, _) { true }
LIT_STR(_) { true }
_ { false }
LIT_INT(_, _) => true,
LIT_UINT(_, _) => true,
LIT_INT_UNSUFFIXED(_) => true,
LIT_FLOAT(_, _) => true,
LIT_STR(_) => true,
_ => false
}
}
pure fn is_ident(t: token) -> bool {
alt t { IDENT(_, _) { true } _ { false } }
alt t { IDENT(_, _) => true, _ => false }
}
pure fn is_plain_ident(t: token) -> bool {
alt t { IDENT(_, false) { true } _ { false } }
alt t { IDENT(_, false) => true, _ => false }
}
pure fn is_bar(t: token) -> bool {
alt t { BINOP(OR) | OROR { true } _ { false } }
alt t { BINOP(OR) | OROR => true, _ => false }
}
/**

View file

@ -63,11 +63,11 @@ enum token { STRING(@~str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
fn tok_str(++t: token) -> ~str {
alt t {
STRING(s, len) { return fmt!{"STR(%s,%d)", *s, len}; }
BREAK(_) { return ~"BREAK"; }
BEGIN(_) { return ~"BEGIN"; }
END { return ~"END"; }
EOF { return ~"EOF"; }
STRING(s, len) => return fmt!{"STR(%s,%d)", *s, len},
BREAK(_) => return ~"BREAK",
BEGIN(_) => return ~"BEGIN",
END => return ~"END",
EOF => return ~"EOF"
}
}
@ -239,7 +239,7 @@ impl printer for printer {
fn pretty_print(t: token) {
debug!{"pp ~[%u,%u]", self.left, self.right};
alt t {
EOF {
EOF => {
if !self.scan_stack_empty {
self.check_stack(0);
self.advance_left(self.token[self.left],
@ -247,7 +247,7 @@ impl printer for printer {
}
self.indent(0);
}
BEGIN(b) {
BEGIN(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
@ -259,7 +259,7 @@ impl printer for printer {
self.size[self.right] = -self.right_total;
self.scan_push(self.right);
}
END {
END => {
if self.scan_stack_empty {
debug!{"pp END/print ~[%u,%u]", self.left, self.right};
self.print(t, 0);
@ -271,7 +271,7 @@ impl printer for printer {
self.scan_push(self.right);
}
}
BREAK(b) {
BREAK(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
@ -285,7 +285,7 @@ impl printer for printer {
self.size[self.right] = -self.right_total;
self.right_total += b.blank_space;
}
STRING(s, len) {
STRING(s, len) => {
if self.scan_stack_empty {
debug!{"pp STRING/print ~[%u,%u]", self.left, self.right};
self.print(t, len);
@ -358,9 +358,9 @@ impl printer for printer {
if L >= 0 {
self.print(x, L);
alt x {
BREAK(b) { self.left_total += b.blank_space; }
STRING(_, len) { assert (len == L); self.left_total += len; }
_ { }
BREAK(b) => self.left_total += b.blank_space,
STRING(_, len) => { assert (len == L); self.left_total += len; }
_ => ()
}
if self.left != self.right {
self.left += 1u;
@ -374,19 +374,19 @@ impl printer for printer {
if !self.scan_stack_empty {
let x = self.scan_top();
alt copy self.token[x] {
BEGIN(b) {
BEGIN(b) => {
if k > 0 {
self.size[self.scan_pop()] = self.size[x] +
self.right_total;
self.check_stack(k - 1);
}
}
END {
END => {
// paper says + not =, but that makes no sense.
self.size[self.scan_pop()] = 1;
self.check_stack(k + 1);
}
_ {
_ => {
self.size[self.scan_pop()] = self.size[x] + self.right_total;
if k > 0 { self.check_stack(k); }
}
@ -423,7 +423,7 @@ impl printer for printer {
self.space};
log(debug, buf_str(self.token, self.size, self.left, self.right, 6u));
alt x {
BEGIN(b) {
BEGIN(b) => {
if L > self.space {
let col = self.margin - self.space + b.offset;
debug!{"print BEGIN -> push broken block at col %d", col};
@ -435,25 +435,25 @@ impl printer for printer {
pbreak: fits});
}
}
END {
END => {
debug!{"print END -> pop END"};
assert (self.print_stack.len() != 0u);
self.print_stack.pop();
}
BREAK(b) {
BREAK(b) => {
let top = self.get_top();
alt top.pbreak {
fits {
fits => {
debug!{"print BREAK in fitting block"};
self.space -= b.blank_space;
self.indent(b.blank_space);
}
broken(consistent) {
broken(consistent) => {
debug!{"print BREAK in consistent block"};
self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
}
broken(inconsistent) {
broken(inconsistent) => {
if L > self.space {
debug!{"print BREAK w/ newline in inconsistent"};
self.print_newline(top.offset + b.offset);
@ -466,14 +466,14 @@ impl printer for printer {
}
}
}
STRING(s, len) {
STRING(s, len) => {
debug!{"print STRING"};
assert (L == len);
// assert L <= space;
self.space -= len;
self.print_str(*s);
}
EOF {
EOF => {
// EOF should never get here.
fail;
}

File diff suppressed because it is too large Load diff

View file

@ -28,8 +28,8 @@ trait interner<T: const copy> {
impl <T: const copy> of interner<T> for hash_interner<T> {
fn intern(val: T) -> uint {
alt self.map.find(val) {
some(idx) { return idx; }
none {
some(idx) => return idx,
none => {
let new_idx = self.vect.len();
self.map.insert(val, new_idx);
self.vect.push(val);

View file

@ -27,19 +27,19 @@ enum fn_kind {
fn name_of_fn(fk: fn_kind) -> ident {
alt fk {
fk_item_fn(name, _) | fk_method(name, _, _)
| fk_ctor(name, _, _, _, _) { /* FIXME (#2543) */ copy name }
fk_anon(*) | fk_fn_block(*) { @~"anon" }
fk_dtor(*) { @~"drop" }
| fk_ctor(name, _, _, _, _) => /* FIXME (#2543) */ copy name,
fk_anon(*) | fk_fn_block(*) => @~"anon",
fk_dtor(*) => @~"drop"
}
}
fn tps_of_fn(fk: fn_kind) -> ~[ty_param] {
alt fk {
fk_item_fn(_, tps) | fk_method(_, tps, _)
| fk_ctor(_, _, tps, _, _) | fk_dtor(tps, _, _, _) {
| fk_ctor(_, _, tps, _, _) | fk_dtor(tps, _, _, _) => {
/* FIXME (#2543) */ copy tps
}
fk_anon(*) | fk_fn_block(*) { ~[] }
fk_anon(*) | fk_fn_block(*) => ~[]
}
}
@ -90,14 +90,12 @@ fn visit_crate<E>(c: crate, e: E, v: vt<E>) {
fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
alt cd.node {
cdir_src_mod(_, _) { }
cdir_dir_mod(_, cdirs, _) {
for cdirs.each |cdir| {
visit_crate_directive(cdir, e, v);
}
cdir_src_mod(_, _) => (),
cdir_dir_mod(_, cdirs, _) => for cdirs.each |cdir| {
visit_crate_directive(cdir, e, v);
}
cdir_view_item(vi) { v.visit_view_item(vi, e, v); }
cdir_syntax(_) { }
cdir_view_item(vi) => v.visit_view_item(vi, e, v),
cdir_syntax(_) => ()
}
}
@ -111,33 +109,36 @@ fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { }
fn visit_local<E>(loc: @local, e: E, v: vt<E>) {
v.visit_pat(loc.node.pat, e, v);
v.visit_ty(loc.node.ty, e, v);
alt loc.node.init { none { } some(i) { v.visit_expr(i.expr, e, v); } }
alt loc.node.init {
none => (),
some(i) => v.visit_expr(i.expr, e, v)
}
}
fn visit_item<E>(i: @item, e: E, v: vt<E>) {
alt i.node {
item_const(t, ex) { v.visit_ty(t, e, v); v.visit_expr(ex, e, v); }
item_fn(decl, tp, body) {
item_const(t, ex) => { v.visit_ty(t, e, v); v.visit_expr(ex, e, v); }
item_fn(decl, tp, body) => {
v.visit_fn(fk_item_fn(/* FIXME (#2543) */ copy i.ident,
/* FIXME (#2543) */ copy tp), decl, body,
i.span, i.id, e, v);
}
item_mod(m) { v.visit_mod(m, i.span, i.id, e, v); }
item_foreign_mod(nm) {
item_mod(m) => v.visit_mod(m, i.span, i.id, e, v),
item_foreign_mod(nm) => {
for nm.view_items.each |vi| { v.visit_view_item(vi, e, v); }
for nm.items.each |ni| { v.visit_foreign_item(ni, e, v); }
}
item_ty(t, tps) {
item_ty(t, tps) => {
v.visit_ty(t, e, v);
v.visit_ty_params(tps, e, v);
}
item_enum(variants, tps) {
item_enum(variants, tps) => {
v.visit_ty_params(tps, e, v);
for variants.each |vr| {
for vr.node.args.each |va| { v.visit_ty(va.ty, e, v); }
}
}
item_impl(tps, traits, ty, methods) {
item_impl(tps, traits, ty, methods) => {
v.visit_ty_params(tps, e, v);
for traits.each |p| {
visit_path(p.path, e, v);
@ -147,7 +148,7 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
visit_method_helper(m, e, v)
}
}
item_class(tps, traits, members, m_ctor, m_dtor) {
item_class(tps, traits, members, m_ctor, m_dtor) => {
v.visit_ty_params(tps, e, v);
for members.each |m| {
v.visit_class_item(m, e, v);
@ -162,25 +163,21 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
ast_util::local_def(i.id), e, v)
};
}
item_trait(tps, traits, methods) {
item_trait(tps, traits, methods) => {
v.visit_ty_params(tps, e, v);
for traits.each |p| { visit_path(p.path, e, v); }
for methods.each |m| {
v.visit_trait_method(m, e, v);
}
}
item_mac(m) { visit_mac(m, e, v) }
item_mac(m) => visit_mac(m, e, v)
}
}
fn visit_class_item<E>(cm: @class_member, e:E, v:vt<E>) {
alt cm.node {
instance_var(_, t, _, _, _) {
v.visit_ty(t, e, v);
}
class_method(m) {
visit_method_helper(m, e, v);
}
instance_var(_, t, _, _, _) => v.visit_ty(t, e, v),
class_method(m) => visit_method_helper(m, e, v)
}
}
@ -189,26 +186,25 @@ fn skip_ty<E>(_t: @ty, _e: E, _v: vt<E>) {}
fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
alt t.node {
ty_box(mt) | ty_uniq(mt) |
ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) {
ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => {
v.visit_ty(mt.ty, e, v);
}
ty_rec(flds) {
for flds.each |f| { v.visit_ty(f.node.mt.ty, e, v); }
ty_rec(flds) => for flds.each |f| {
v.visit_ty(f.node.mt.ty, e, v);
}
ty_tup(ts) { for ts.each |tt| { v.visit_ty(tt, e, v); } }
ty_fn(_, decl) {
ty_tup(ts) => for ts.each |tt| {
v.visit_ty(tt, e, v);
}
ty_fn(_, decl) => {
for decl.inputs.each |a| { v.visit_ty(a.ty, e, v); }
v.visit_ty(decl.output, e, v);
}
ty_path(p, _) { visit_path(p, e, v); }
ty_fixed_length(t, _) {
v.visit_ty(t, e, v);
}
ty_path(p, _) => visit_path(p, e, v),
ty_fixed_length(t, _) => v.visit_ty(t, e, v),
ty_nil |
ty_bot |
ty_mac(_) |
ty_infer {
}
ty_infer => ()
}
}
@ -218,31 +214,31 @@ fn visit_path<E>(p: @path, e: E, v: vt<E>) {
fn visit_pat<E>(p: @pat, e: E, v: vt<E>) {
alt p.node {
pat_enum(path, children) {
pat_enum(path, children) => {
visit_path(path, e, v);
do option::iter(children) |children| {
for children.each |child| { v.visit_pat(child, e, v); }}
}
pat_rec(fields, _) {
for fields.each |f| { v.visit_pat(f.pat, e, v); }
pat_rec(fields, _) => for fields.each |f| {
v.visit_pat(f.pat, e, v)
}
pat_tup(elts) { for elts.each |elt| { v.visit_pat(elt, e, v); } }
pat_box(inner) | pat_uniq(inner) {
v.visit_pat(inner, e, v);
pat_tup(elts) => for elts.each |elt| {
v.visit_pat(elt, e, v)
}
pat_ident(_, path, inner) {
pat_box(inner) | pat_uniq(inner) => v.visit_pat(inner, e, v),
pat_ident(_, path, inner) => {
visit_path(path, e, v);
do option::iter(inner) |subpat| { v.visit_pat(subpat, e, v)};
}
pat_lit(ex) { v.visit_expr(ex, e, v); }
pat_range(e1, e2) { v.visit_expr(e1, e, v); v.visit_expr(e2, e, v); }
pat_wild {}
pat_lit(ex) => v.visit_expr(ex, e, v),
pat_range(e1, e2) => { v.visit_expr(e1, e, v); v.visit_expr(e2, e, v); }
pat_wild => ()
}
}
fn visit_foreign_item<E>(ni: @foreign_item, e: E, v: vt<E>) {
alt ni.node {
foreign_item_fn(fd, tps) {
foreign_item_fn(fd, tps) => {
v.visit_ty_params(tps, e, v);
visit_fn_decl(fd, e, v);
}
@ -253,8 +249,8 @@ fn visit_ty_params<E>(tps: ~[ty_param], e: E, v: vt<E>) {
for tps.each |tp| {
for vec::each(*tp.bounds) |bound| {
alt bound {
bound_trait(t) { v.visit_ty(t, e, v); }
bound_copy | bound_send | bound_const | bound_owned { }
bound_trait(t) => v.visit_ty(t, e, v),
bound_copy | bound_send | bound_const | bound_owned => ()
}
}
}
@ -309,12 +305,8 @@ fn visit_ty_method<E>(m: ty_method, e: E, v: vt<E>) {
fn visit_trait_method<E>(m: trait_method, e: E, v: vt<E>) {
alt m {
required(ty_m) {
v.visit_ty_method(ty_m, e, v)
}
provided(m) {
visit_method_helper(m, e, v)
}
required(ty_m) => v.visit_ty_method(ty_m, e, v),
provided(m) => visit_method_helper(m, e, v)
}
}
@ -326,23 +318,23 @@ fn visit_block<E>(b: ast::blk, e: E, v: vt<E>) {
fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
alt s.node {
stmt_decl(d, _) { v.visit_decl(d, e, v); }
stmt_expr(ex, _) { v.visit_expr(ex, e, v); }
stmt_semi(ex, _) { v.visit_expr(ex, e, v); }
stmt_decl(d, _) => v.visit_decl(d, e, v),
stmt_expr(ex, _) => v.visit_expr(ex, e, v),
stmt_semi(ex, _) => v.visit_expr(ex, e, v)
}
}
fn visit_decl<E>(d: @decl, e: E, v: vt<E>) {
alt d.node {
decl_local(locs) {
for locs.each |loc| { v.visit_local(loc, e, v); }
decl_local(locs) => for locs.each |loc| {
v.visit_local(loc, e, v)
}
decl_item(it) { v.visit_item(it, e, v); }
decl_item(it) => v.visit_item(it, e, v)
}
}
fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } }
alt eo { none => (), some(ex) => v.visit_expr(ex, e, v) }
}
fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
@ -351,86 +343,88 @@ fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
alt m.node {
ast::mac_invoc(pth, arg, body) {
ast::mac_invoc(pth, arg, body) => {
option::map(arg, |arg| v.visit_expr(arg, e, v)); }
ast::mac_invoc_tt(pth, tt) { /* no user-serviceable parts inside */ }
ast::mac_ellipsis { }
ast::mac_aq(_, e) { /* FIXME: maybe visit (Issue #2340) */ }
ast::mac_var(_) { }
ast::mac_invoc_tt(pth, tt) => { /* no user-serviceable parts inside */ }
ast::mac_ellipsis => (),
ast::mac_aq(_, e) => { /* FIXME: maybe visit (Issue #2340) */ }
ast::mac_var(_) => ()
}
}
fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
alt ex.node {
expr_vstore(x, _) { v.visit_expr(x, e, v); }
expr_vec(es, _) { visit_exprs(es, e, v); }
expr_vstore(x, _) => v.visit_expr(x, e, v),
expr_vec(es, _) => visit_exprs(es, e, v),
expr_repeat(element, count, _) => {
v.visit_expr(element, e, v);
v.visit_expr(count, e, v);
}
expr_rec(flds, base) {
expr_rec(flds, base) => {
for flds.each |f| { v.visit_expr(f.node.expr, e, v); }
visit_expr_opt(base, e, v);
}
expr_struct(p, flds) {
expr_struct(p, flds) => {
visit_path(p, e, v);
for flds.each |f| { v.visit_expr(f.node.expr, e, v); }
}
expr_tup(elts) { for elts.each |el| { v.visit_expr(el, e, v); } }
expr_call(callee, args, _) {
expr_tup(elts) => for elts.each |el| { v.visit_expr(el, e, v); }
expr_call(callee, args, _) => {
visit_exprs(args, e, v);
v.visit_expr(callee, e, v);
}
expr_binary(_, a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_binary(_, a, b) => {
v.visit_expr(a, e, v); v.visit_expr(b, e, v);
}
expr_addr_of(_, x) | expr_unary(_, x) |
expr_loop_body(x) | expr_do_body(x) |
expr_assert(x) { v.visit_expr(x, e, v); }
expr_lit(_) { }
expr_cast(x, t) { v.visit_expr(x, e, v); v.visit_ty(t, e, v); }
expr_if(x, b, eo) {
expr_assert(x) => v.visit_expr(x, e, v),
expr_lit(_) => (),
expr_cast(x, t) => { v.visit_expr(x, e, v); v.visit_ty(t, e, v); }
expr_if(x, b, eo) => {
v.visit_expr(x, e, v);
v.visit_block(b, e, v);
visit_expr_opt(eo, e, v);
}
expr_while(x, b) { v.visit_expr(x, e, v); v.visit_block(b, e, v); }
expr_loop(b) { v.visit_block(b, e, v); }
expr_alt(x, arms, _) {
expr_while(x, b) => { v.visit_expr(x, e, v); v.visit_block(b, e, v); }
expr_loop(b) => v.visit_block(b, e, v),
expr_alt(x, arms, _) => {
v.visit_expr(x, e, v);
for arms.each |a| { v.visit_arm(a, e, v); }
}
expr_fn(proto, decl, body, cap_clause) {
expr_fn(proto, decl, body, cap_clause) => {
v.visit_fn(fk_anon(proto, cap_clause), decl, body,
ex.span, ex.id, e, v);
}
expr_fn_block(decl, body, cap_clause) {
expr_fn_block(decl, body, cap_clause) => {
v.visit_fn(fk_fn_block(cap_clause), decl, body,
ex.span, ex.id, e, v);
}
expr_block(b) { v.visit_block(b, e, v); }
expr_assign(a, b) { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
expr_copy(a) { v.visit_expr(a, e, v); }
expr_unary_move(a) { v.visit_expr(a, e, v); }
expr_move(a, b) { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
expr_swap(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_assign_op(_, a, b) {
expr_block(b) => v.visit_block(b, e, v),
expr_assign(a, b) => { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
expr_copy(a) => v.visit_expr(a, e, v),
expr_unary_move(a) => v.visit_expr(a, e, v),
expr_move(a, b) => { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
expr_swap(a, b) => { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_assign_op(_, a, b) => {
v.visit_expr(b, e, v);
v.visit_expr(a, e, v);
}
expr_field(x, _, tys) {
expr_field(x, _, tys) => {
v.visit_expr(x, e, v);
for tys.each |tp| { v.visit_ty(tp, e, v); }
}
expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_path(p) { visit_path(p, e, v); }
expr_fail(eo) { visit_expr_opt(eo, e, v); }
expr_break { }
expr_again { }
expr_ret(eo) { visit_expr_opt(eo, e, v); }
expr_log(_, lv, x) {
expr_index(a, b) => { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_path(p) => visit_path(p, e, v),
expr_fail(eo) => visit_expr_opt(eo, e, v),
expr_break => (),
expr_again => (),
expr_ret(eo) => visit_expr_opt(eo, e, v),
expr_log(_, lv, x) => {
v.visit_expr(lv, e, v);
v.visit_expr(x, e, v);
}
expr_mac(mac) { visit_mac(mac, e, v); }
expr_mac(mac) => visit_mac(mac, e, v),
}
v.visit_expr_post(ex, e, v);
}

View file

@ -62,8 +62,8 @@ mod write {
fn mk_intermediate_name(output_path: ~str, extension: ~str) ->
~str unsafe {
let stem = alt str::find_char(output_path, '.') {
some(dot_pos) { str::slice(output_path, 0u, dot_pos) }
none { output_path }
some(dot_pos) => str::slice(output_path, 0u, dot_pos),
none => output_path
};
return stem + ~"." + extension;
}
@ -83,7 +83,7 @@ mod write {
if opts.save_temps {
alt opts.output_type {
output_type_bitcode {
output_type_bitcode => {
if opts.optimize != 0u {
let filename = mk_intermediate_name(output, ~"no-opt.bc");
str::as_c_str(filename, |buf| {
@ -91,7 +91,7 @@ mod write {
});
}
}
_ {
_ => {
let filename = mk_intermediate_name(output, ~"bc");
str::as_c_str(filename, |buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf)
@ -146,13 +146,12 @@ mod write {
let LLVMOptDefault = 2 as c_int; // -O2, -Os
let LLVMOptAggressive = 3 as c_int; // -O3
let mut CodeGenOptLevel;
alt check opts.optimize {
0u { CodeGenOptLevel = LLVMOptNone; }
1u { CodeGenOptLevel = LLVMOptLess; }
2u { CodeGenOptLevel = LLVMOptDefault; }
3u { CodeGenOptLevel = LLVMOptAggressive; }
}
let mut CodeGenOptLevel = alt check opts.optimize {
0u => LLVMOptNone,
1u => LLVMOptLess,
2u => LLVMOptDefault,
3u => LLVMOptAggressive
};
let mut FileType;
if opts.output_type == output_type_object ||
@ -325,13 +324,13 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
for linkage_metas.each |meta| {
if *attr::get_meta_item_name(meta) == ~"name" {
alt attr::get_meta_item_value_str(meta) {
some(v) { name = some(v); }
none { vec::push(cmh_items, meta); }
some(v) => { name = some(v); }
none => vec::push(cmh_items, meta)
}
} else if *attr::get_meta_item_name(meta) == ~"vers" {
alt attr::get_meta_item_value_str(meta) {
some(v) { vers = some(v); }
none { vec::push(cmh_items, meta); }
some(v) => { vers = some(v); }
none => vec::push(cmh_items, meta)
}
} else { vec::push(cmh_items, meta); }
}
@ -357,14 +356,14 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
for cmh_items.each |m_| {
let m = m_;
alt m.node {
ast::meta_name_value(key, value) {
ast::meta_name_value(key, value) => {
symbol_hasher.write_str(len_and_str(*key));
symbol_hasher.write_str(len_and_str_lit(value));
}
ast::meta_word(name) {
ast::meta_word(name) => {
symbol_hasher.write_str(len_and_str(*name));
}
ast::meta_list(_, _) {
ast::meta_list(_, _) => {
// FIXME (#607): Implement this
fail ~"unimplemented meta_item variant";
}
@ -387,8 +386,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
fn crate_meta_name(sess: session, _crate: ast::crate,
output: ~str, metas: provided_metas) -> @~str {
return alt metas.name {
some(v) { v }
none {
some(v) => v,
none => {
let name =
{
let mut os =
@ -409,8 +408,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
fn crate_meta_vers(sess: session, _crate: ast::crate,
metas: provided_metas) -> @~str {
return alt metas.vers {
some(v) { v }
none {
some(v) => v,
none => {
let vers = ~"0.0";
warn_missing(sess, ~"vers", vers);
@vers
@ -453,8 +452,8 @@ fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &hash::State, t: ty::t,
fn get_symbol_hash(ccx: @crate_ctxt, t: ty::t) -> ~str {
alt ccx.type_hashcodes.find(t) {
some(h) { return h; }
none {
some(h) => return h,
none => {
let hash = symbol_hash(ccx.tcx, ccx.symbol_hasher, t, ccx.link_meta);
ccx.type_hashcodes.insert(t, hash);
return hash;
@ -469,18 +468,18 @@ fn sanitize(s: ~str) -> ~str {
let mut result = ~"";
do str::chars_iter(s) |c| {
alt c {
'@' { result += ~"_sbox_"; }
'~' { result += ~"_ubox_"; }
'*' { result += ~"_ptr_"; }
'&' { result += ~"_ref_"; }
',' { result += ~"_"; }
'@' => result += ~"_sbox_",
'~' => result += ~"_ubox_",
'*' => result += ~"_ptr_",
'&' => result += ~"_ref_",
',' => result += ~"_",
'{' | '(' { result += ~"_of_"; }
'{' | '(' => result += ~"_of_",
'a' to 'z'
| 'A' to 'Z'
| '0' to '9'
| '_' { str::push_char(result,c); }
_ {
| '_' => str::push_char(result,c),
_ => {
if c > 'z' && char::is_XID_continue(c) {
str::push_char(result,c);
}
@ -504,7 +503,7 @@ fn mangle(ss: path) -> ~str {
let mut n = ~"_ZN"; // Begin name-sequence.
for ss.each |s| {
alt s { path_name(s) | path_mod(s) {
alt s { path_name(s) | path_mod(s) => {
let sani = sanitize(*s);
n += fmt!{"%u%s", str::len(sani), sani};
} }
@ -568,10 +567,10 @@ fn link_binary(sess: session,
return str::connect(parts, ~".");
}
return alt config.os {
session::os_macos { rmext(rmlib(filename)) }
session::os_linux { rmext(rmlib(filename)) }
session::os_freebsd { rmext(rmlib(filename)) }
_ { rmext(filename) }
session::os_macos => rmext(rmlib(filename)),
session::os_linux => rmext(rmlib(filename)),
session::os_freebsd => rmext(rmlib(filename)),
_ => rmext(filename)
};
}

View file

@ -8,8 +8,8 @@ export get_rpath_flags;
pure fn not_win32(os: session::os) -> bool {
alt os {
session::os_win32 { false }
_ { true }
session::os_win32 => false,
_ => true
}
}
@ -109,10 +109,10 @@ fn get_rpath_relative_to_output(os: session::os,
// Mac doesn't appear to support $ORIGIN
let prefix = alt os {
session::os_linux { ~"$ORIGIN" + path::path_sep() }
session::os_freebsd { ~"$ORIGIN" + path::path_sep() }
session::os_macos { ~"@executable_path" + path::path_sep() }
session::os_win32 { core::unreachable(); }
session::os_linux => ~"$ORIGIN" + path::path_sep(),
session::os_freebsd => ~"$ORIGIN" + path::path_sep(),
session::os_macos => ~"@executable_path" + path::path_sep(),
session::os_win32 => core::unreachable()
};
prefix + get_relative_to(

View file

@ -9,31 +9,31 @@ fn get_target_strs(target_os: session::os) -> target_strs::t {
meta_sect_name: meta_section_name(sess_os_to_meta_os(target_os)),
data_layout: alt target_os {
session::os_macos {
session::os_macos => {
~"e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16" +
~"-i32:32:32-i64:32:64" +
~"-f32:32:32-f64:32:64-v64:64:64" +
~"-v128:128:128-a0:0:64-f80:128:128" + ~"-n8:16:32"
}
session::os_win32 {
session::os_win32 => {
~"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32"
}
session::os_linux {
session::os_linux => {
~"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32"
}
session::os_freebsd {
session::os_freebsd => {
~"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32"
}
},
target_triple: alt target_os {
session::os_macos { ~"i686-apple-darwin" }
session::os_win32 { ~"i686-pc-mingw32" }
session::os_linux { ~"i686-unknown-linux-gnu" }
session::os_freebsd { ~"i686-unknown-freebsd" }
session::os_macos => ~"i686-apple-darwin",
session::os_win32 => ~"i686-pc-mingw32",
session::os_linux => ~"i686-unknown-linux-gnu",
session::os_freebsd => ~"i686-unknown-freebsd"
},
cc_args: ~[~"-m32"]

View file

@ -9,26 +9,26 @@ fn get_target_strs(target_os: session::os) -> target_strs::t {
meta_sect_name: meta_section_name(sess_os_to_meta_os(target_os)),
data_layout: alt target_os {
session::os_macos {
session::os_macos => {
~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
~"s0:64:64-f80:128:128-n8:16:32:64"
}
session::os_win32 {
session::os_win32 => {
// FIXME: Test this. Copied from linux (#2398)
~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
~"s0:64:64-f80:128:128-n8:16:32:64-S128"
}
session::os_linux {
session::os_linux => {
~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
~"s0:64:64-f80:128:128-n8:16:32:64-S128"
}
session::os_freebsd {
session::os_freebsd => {
~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
~"s0:64:64-f80:128:128-n8:16:32:64-S128"
@ -36,10 +36,10 @@ fn get_target_strs(target_os: session::os) -> target_strs::t {
},
target_triple: alt target_os {
session::os_macos { ~"x86_64-apple-darwin" }
session::os_win32 { ~"x86_64-pc-mingw32" }
session::os_linux { ~"x86_64-unknown-linux-gnu" }
session::os_freebsd { ~"x86_64-unknown-freebsd" }
session::os_macos => ~"x86_64-apple-darwin",
session::os_win32 => ~"x86_64-pc-mingw32",
session::os_linux => ~"x86_64-unknown-linux-gnu",
session::os_freebsd => ~"x86_64-unknown-freebsd",
},
cc_args: ~[~"-m64"]

View file

@ -27,27 +27,27 @@ fn anon_src() -> ~str { ~"<anon>" }
fn source_name(input: input) -> ~str {
alt input {
file_input(ifile) { ifile }
str_input(_) { anon_src() }
file_input(ifile) => ifile,
str_input(_) => anon_src()
}
}
fn default_configuration(sess: session, argv0: ~str, input: input) ->
ast::crate_cfg {
let libc = alt sess.targ_cfg.os {
session::os_win32 { ~"msvcrt.dll" }
session::os_macos { ~"libc.dylib" }
session::os_linux { ~"libc.so.6" }
session::os_freebsd { ~"libc.so.7" }
session::os_win32 => ~"msvcrt.dll",
session::os_macos => ~"libc.dylib",
session::os_linux => ~"libc.so.6",
session::os_freebsd => ~"libc.so.7"
// _ { "libc.so" }
};
let mk = attr::mk_name_value_item_str;
let (arch,wordsz) = alt sess.targ_cfg.arch {
session::arch_x86 { (~"x86",~"32") }
session::arch_x86_64 { (~"x86_64",~"64") }
session::arch_arm { (~"arm",~"32") }
session::arch_x86 => (~"x86",~"32"),
session::arch_x86_64 => (~"x86_64",~"64"),
session::arch_arm => (~"arm",~"32")
};
return ~[ // Target bindings.
@ -100,10 +100,10 @@ enum input {
fn parse_input(sess: session, cfg: ast::crate_cfg, input: input)
-> @ast::crate {
alt input {
file_input(file) {
file_input(file) => {
parse::parse_crate_from_file(file, cfg, sess.parse_sess)
}
str_input(src) {
str_input(src) => {
// FIXME (#2319): Don't really want to box the source string
parse::parse_crate_from_source_str(
anon_src(), @src, cfg, sess.parse_sess)
@ -270,37 +270,40 @@ fn compile_input(sess: session, cfg: ast::crate_cfg, input: input,
fn pretty_print_input(sess: session, cfg: ast::crate_cfg, input: input,
ppm: pp_mode) {
fn ann_paren_for_expr(node: pprust::ann_node) {
alt node { pprust::node_expr(s, expr) { pprust::popen(s); } _ { } }
alt node {
pprust::node_expr(s, expr) => pprust::popen(s),
_ => ()
}
}
fn ann_typed_post(tcx: ty::ctxt, node: pprust::ann_node) {
alt node {
pprust::node_expr(s, expr) {
pprust::node_expr(s, expr) => {
pp::space(s.s);
pp::word(s.s, ~"as");
pp::space(s.s);
pp::word(s.s, ppaux::ty_to_str(tcx, ty::expr_ty(tcx, expr)));
pprust::pclose(s);
}
_ { }
_ => ()
}
}
fn ann_identified_post(node: pprust::ann_node) {
alt node {
pprust::node_item(s, item) {
pprust::node_item(s, item) => {
pp::space(s.s);
pprust::synth_comment(s, int::to_str(item.id, 10u));
}
pprust::node_block(s, blk) {
pprust::node_block(s, blk) => {
pp::space(s.s);
pprust::synth_comment(s,
~"block " + int::to_str(blk.node.id, 10u));
}
pprust::node_expr(s, expr) {
pprust::node_expr(s, expr) => {
pp::space(s.s);
pprust::synth_comment(s, int::to_str(expr.id, 10u));
pprust::pclose(s);
}
pprust::node_pat(s, pat) {
pprust::node_pat(s, pat) => {
pp::space(s.s);
pprust::synth_comment(s, ~"pat " + int::to_str(pat.id, 10u));
}
@ -312,21 +315,21 @@ fn pretty_print_input(sess: session, cfg: ast::crate_cfg, input: input,
// from stdin, we're going to just suck the source into a string
// so both the parser and pretty-printer can use it.
let upto = alt ppm {
ppm_expanded | ppm_expanded_identified { cu_expand }
ppm_typed { cu_typeck }
_ { cu_parse }
ppm_expanded | ppm_expanded_identified => cu_expand,
ppm_typed => cu_typeck,
_ => cu_parse
};
let {crate, tcx} = compile_upto(sess, cfg, input, upto, none);
let ann = alt ppm {
ppm_typed {
ppm_typed => {
{pre: ann_paren_for_expr,
post: |a| ann_typed_post(option::get(tcx), a) }
}
ppm_identified | ppm_expanded_identified {
ppm_identified | ppm_expanded_identified => {
{pre: ann_paren_for_expr, post: ann_identified_post}
}
ppm_expanded | ppm_normal { pprust::no_ann() }
ppm_expanded | ppm_normal => pprust::no_ann()
};
let is_expanded = upto != cu_parse;
let src = codemap::get_filemap(sess.codemap, source_name(input)).src;
@ -369,23 +372,23 @@ fn get_arch(triple: ~str) -> option<session::arch> {
fn build_target_config(sopts: @session::options,
demitter: diagnostic::emitter) -> @session::config {
let os = alt get_os(sopts.target_triple) {
some(os) { os }
none { early_error(demitter, ~"unknown operating system") }
some(os) => os,
none => early_error(demitter, ~"unknown operating system")
};
let arch = alt get_arch(sopts.target_triple) {
some(arch) { arch }
none { early_error(demitter,
~"unknown architecture: " + sopts.target_triple) }
some(arch) => arch,
none => early_error(demitter,
~"unknown architecture: " + sopts.target_triple)
};
let (int_type, uint_type, float_type) = alt arch {
session::arch_x86 {(ast::ty_i32, ast::ty_u32, ast::ty_f64)}
session::arch_x86_64 {(ast::ty_i64, ast::ty_u64, ast::ty_f64)}
session::arch_arm {(ast::ty_i32, ast::ty_u32, ast::ty_f64)}
session::arch_x86 => (ast::ty_i32, ast::ty_u32, ast::ty_f64),
session::arch_x86_64 => (ast::ty_i64, ast::ty_u64, ast::ty_f64),
session::arch_arm => (ast::ty_i32, ast::ty_u32, ast::ty_f64)
};
let target_strs = alt arch {
session::arch_x86 {x86::get_target_strs(os)}
session::arch_x86_64 {x86_64::get_target_strs(os)}
session::arch_arm {x86::get_target_strs(os)}
session::arch_x86 => x86::get_target_strs(os),
session::arch_x86_64 => x86_64::get_target_strs(os),
session::arch_arm => x86::get_target_strs(os)
};
let target_cfg: @session::config =
@{os: os, arch: arch, target_strs: target_strs, int_type: int_type,
@ -436,11 +439,11 @@ fn build_session_options(matches: getopts::matches,
for flags.each |lint_name| {
let lint_name = str::replace(lint_name, ~"-", ~"_");
alt lint_dict.find(lint_name) {
none {
none => {
early_error(demitter, fmt!{"unknown %s flag: %s",
level_name, lint_name});
}
some(lint) {
some(lint) => {
vec::push(lint_opts, (lint.lint, level));
}
}
@ -485,8 +488,8 @@ fn build_session_options(matches: getopts::matches,
let save_temps = getopts::opt_present(matches, ~"save-temps");
alt output_type {
// unless we're emitting human-readable assembly, omit comments.
link::output_type_llvm_assembly | link::output_type_assembly {}
_ { debugging_opts |= session::no_asm_comments; }
link::output_type_llvm_assembly | link::output_type_assembly => (),
_ => debugging_opts |= session::no_asm_comments
}
let opt_level: uint =
if opt_present(matches, ~"O") {
@ -496,11 +499,11 @@ fn build_session_options(matches: getopts::matches,
2u
} else if opt_present(matches, ~"opt-level") {
alt getopts::opt_str(matches, ~"opt-level") {
~"0" { 0u }
~"1" { 1u }
~"2" { 2u }
~"3" { 3u }
_ {
~"0" => 0u,
~"1" => 1u,
~"2" => 2u,
~"3" => 3u,
_ => {
early_error(demitter, ~"optimization level needs " +
~"to be between 0-3")
}
@ -508,8 +511,8 @@ fn build_session_options(matches: getopts::matches,
} else { 0u };
let target =
alt target_opt {
none { host_triple() }
some(s) { s }
none => host_triple(),
some(s) => s
};
let addl_lib_search_paths = getopts::opt_strs(matches, ~"L");
@ -626,43 +629,33 @@ fn build_output_filenames(input: input,
let obj_suffix =
alt sopts.output_type {
link::output_type_none { ~"none" }
link::output_type_bitcode { ~"bc" }
link::output_type_assembly { ~"s" }
link::output_type_llvm_assembly { ~"ll" }
link::output_type_none => ~"none",
link::output_type_bitcode => ~"bc",
link::output_type_assembly => ~"s",
link::output_type_llvm_assembly => ~"ll",
// Object and exe output both use the '.o' extension here
link::output_type_object | link::output_type_exe {
~"o"
}
link::output_type_object | link::output_type_exe => ~"o"
};
alt ofile {
none {
none => {
// "-" as input file will cause the parser to read from stdin so we
// have to make up a name
// We want to toss everything after the final '.'
let dirname = alt odir {
some(d) { d }
none {
alt input {
str_input(_) {
os::getcwd()
}
file_input(ifile) {
path::dirname(ifile)
}
}
some(d) => d,
none => alt input {
str_input(_) => os::getcwd(),
file_input(ifile) => path::dirname(ifile)
}
};
let base_filename = alt input {
file_input(ifile) {
file_input(ifile) => {
let (path, _) = path::splitext(ifile);
path::basename(path)
}
str_input(_) {
~"rust_out"
}
str_input(_) => ~"rust_out"
};
let base_path = path::connect(dirname, base_filename);
@ -678,7 +671,7 @@ fn build_output_filenames(input: input,
}
}
some(out_file) {
some(out_file) => {
out_path = out_file;
obj_path = if stop_after_codegen {
out_file
@ -722,9 +715,9 @@ mod test {
fn test_switch_implies_cfg_test() {
let matches =
alt getopts::getopts(~[~"--test"], opts()) {
ok(m) { m }
err(f) { fail ~"test_switch_implies_cfg_test: " +
getopts::fail_str(f); }
ok(m) => m,
err(f) => fail ~"test_switch_implies_cfg_test: " +
getopts::fail_str(f)
};
let sessopts = build_session_options(matches, diagnostic::emit);
let sess = build_session(sessopts, diagnostic::emit);
@ -738,8 +731,8 @@ mod test {
fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches =
alt getopts::getopts(~[~"--test", ~"--cfg=test"], opts()) {
ok(m) { m }
err(f) {
ok(m) => m,
err(f) => {
fail ~"test_switch_implies_cfg_test_unless_cfg_test: " +
getopts::fail_str(f);
}

View file

@ -96,10 +96,10 @@ fn describe_warnings() {
io::println(fmt!{" %s %7.7s %s",
padded(max_key, k),
alt v.default {
lint::allow { ~"allow" }
lint::warn { ~"warn" }
lint::deny { ~"deny" }
lint::forbid { ~"forbid" }
lint::allow => ~"allow",
lint::warn => ~"warn",
lint::deny => ~"deny",
lint::forbid => ~"forbid"
},
v.desc});
}
@ -125,8 +125,8 @@ fn run_compiler(args: ~[~str], demitter: diagnostic::emitter) {
let matches =
alt getopts::getopts(args, opts()) {
ok(m) { m }
err(f) {
ok(m) => m,
err(f) => {
early_error(demitter, getopts::fail_str(f))
}
};
@ -153,8 +153,8 @@ fn run_compiler(args: ~[~str], demitter: diagnostic::emitter) {
return;
}
let input = alt vec::len(matches.free) {
0u { early_error(demitter, ~"no input filename given") }
1u {
0u => early_error(demitter, ~"no input filename given"),
1u => {
let ifile = matches.free[0];
if ifile == ~"-" {
let src = str::from_bytes(io::stdin().read_whole_stream());
@ -163,7 +163,7 @@ fn run_compiler(args: ~[~str], demitter: diagnostic::emitter) {
file_input(ifile)
}
}
_ { early_error(demitter, ~"multiple input filenames provided") }
_ => early_error(demitter, ~"multiple input filenames provided")
};
let sopts = build_session_options(matches, demitter);
@ -176,19 +176,19 @@ fn run_compiler(args: ~[~str], demitter: diagnostic::emitter) {
~"normal"),
|a| parse_pretty(sess, a) );
alt pretty {
some::<pp_mode>(ppm) {
some::<pp_mode>(ppm) => {
pretty_print_input(sess, cfg, input, ppm);
return;
}
none::<pp_mode> {/* continue */ }
none::<pp_mode> => {/* continue */ }
}
let ls = opt_present(matches, ~"ls");
if ls {
alt input {
file_input(ifile) {
file_input(ifile) => {
list_metadata(sess, ifile, io::stdout());
}
str_input(_) {
str_input(_) => {
early_error(demitter, ~"can not list metadata for stdin");
}
}
@ -241,8 +241,8 @@ fn monitor(+f: fn~(diagnostic::emitter)) {
f(demitter)
} {
result::ok(_) { /* fallthrough */ }
result::err(_) {
result::ok(_) => { /* fallthrough */ }
result::err(_) => {
// Task failed without emitting a fatal diagnostic
if comm::recv(p) == done {
diagnostic::emit(

View file

@ -153,9 +153,9 @@ impl session for session {
fn span_lint_level(level: lint::level,
sp: span, msg: ~str) {
alt level {
lint::allow { }
lint::warn { self.span_warn(sp, msg); }
lint::deny | lint::forbid {
lint::allow => { },
lint::warn => self.span_warn(sp, msg),
lint::deny | lint::forbid => {
self.span_err(sp, msg);
}
}
@ -220,17 +220,17 @@ fn expect<T: copy>(sess: session, opt: option<T>, msg: fn() -> ~str) -> T {
fn building_library(req_crate_type: crate_type, crate: @ast::crate,
testing: bool) -> bool {
alt req_crate_type {
bin_crate { false }
lib_crate { true }
unknown_crate {
bin_crate => false,
lib_crate => true,
unknown_crate => {
if testing {
false
} else {
alt syntax::attr::first_attr_value_str_by_name(
crate.node.attrs,
~"crate_type") {
option::some(@~"lib") { true }
_ { false }
option::some(@~"lib") => true,
_ => false
}
}
}
@ -241,10 +241,10 @@ fn sess_os_to_meta_os(os: os) -> metadata::loader::os {
import metadata::loader;
alt os {
os_win32 { loader::os_win32 }
os_linux { loader::os_linux }
os_macos { loader::os_macos }
os_freebsd { loader::os_freebsd }
os_win32 => loader::os_win32,
os_linux => loader::os_linux,
os_macos => loader::os_macos,
os_freebsd => loader::os_freebsd
}
}

Some files were not shown because too many files have changed in this diff Show more