Use different syntax for checks that matter to typestate
This giant commit changes the syntax of Rust to use "assert" for "check" expressions that didn't mean anything to the typestate system, and continue using "check" for checks that are used as part of typestate checking. Most of the changes are just replacing "check" with "assert" in test cases and rustc.
This commit is contained in:
parent
870435caf5
commit
aa25f22f19
182 changed files with 1256 additions and 1239 deletions
|
|
@@ -283,7 +283,10 @@ tag expr_ {
|
|||
expr_put(option.t[@expr], ann);
|
||||
expr_be(@expr, ann);
|
||||
expr_log(int, @expr, ann);
|
||||
expr_check_expr(@expr, ann);
|
||||
/* just an assert, no significance to typestate */
|
||||
expr_assert(@expr, ann);
|
||||
/* preds that typestate is aware of */
|
||||
expr_check(@expr, ann);
|
||||
expr_port(ann);
|
||||
expr_chan(@expr, ann);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -93,7 +93,7 @@ fn parse_ty(@pstate st, str_def sd) -> ty.t {
|
|||
case ('c') { ret ty.mk_char(st.tcx); }
|
||||
case ('s') { ret ty.mk_str(st.tcx); }
|
||||
case ('t') {
|
||||
check(next(st) as char == '[');
|
||||
assert (next(st) as char == '[');
|
||||
auto def = parse_def(st, sd);
|
||||
let vec[ty.t] params = vec();
|
||||
while (peek(st) as char != ']') {
|
||||
|
|
@@ -108,7 +108,7 @@ fn parse_ty(@pstate st, str_def sd) -> ty.t {
|
|||
case ('P') { ret ty.mk_port(st.tcx, parse_ty(st, sd)); }
|
||||
case ('C') { ret ty.mk_chan(st.tcx, parse_ty(st, sd)); }
|
||||
case ('T') {
|
||||
check(next(st) as char == '[');
|
||||
assert (next(st) as char == '[');
|
||||
let vec[ty.mt] params = vec();
|
||||
while (peek(st) as char != ']') {
|
||||
params += vec(parse_mt(st, sd));
|
||||
|
|
@@ -117,7 +117,7 @@ fn parse_ty(@pstate st, str_def sd) -> ty.t {
|
|||
ret ty.mk_tup(st.tcx, params);
|
||||
}
|
||||
case ('R') {
|
||||
check(next(st) as char == '[');
|
||||
assert (next(st) as char == '[');
|
||||
let vec[ty.field] fields = vec();
|
||||
while (peek(st) as char != ']') {
|
||||
auto name = "";
|
||||
|
|
@@ -149,7 +149,7 @@ fn parse_ty(@pstate st, str_def sd) -> ty.t {
|
|||
ret ty.mk_native_fn(st.tcx,abi,func._0,func._1);
|
||||
}
|
||||
case ('O') {
|
||||
check(next(st) as char == '[');
|
||||
assert (next(st) as char == '[');
|
||||
let vec[ty.method] methods = vec();
|
||||
while (peek(st) as char != ']') {
|
||||
auto proto;
|
||||
|
|
@@ -175,9 +175,9 @@ fn parse_ty(@pstate st, str_def sd) -> ty.t {
|
|||
case ('Y') { ret ty.mk_type(st.tcx); }
|
||||
case ('#') {
|
||||
auto pos = parse_hex(st);
|
||||
check (next(st) as char == ':');
|
||||
assert (next(st) as char == ':');
|
||||
auto len = parse_hex(st);
|
||||
check (next(st) as char == '#');
|
||||
assert (next(st) as char == '#');
|
||||
alt (st.tcx.rcache.find(tup(st.crate,pos,len))) {
|
||||
case (some[ty.t](?tt)) { ret tt; }
|
||||
case (none[ty.t]) {
|
||||
|
|
@@ -245,7 +245,7 @@ fn parse_hex(@pstate st) -> uint {
|
|||
}
|
||||
|
||||
fn parse_ty_fn(@pstate st, str_def sd) -> tup(vec[ty.arg], ty.t) {
|
||||
check(next(st) as char == '[');
|
||||
assert (next(st) as char == '[');
|
||||
let vec[ty.arg] inputs = vec();
|
||||
while (peek(st) as char != ']') {
|
||||
auto mode = ast.val;
|
||||
|
|
|
|||
|
|
@@ -127,6 +127,7 @@ fn keyword_table() -> std.map.hashmap[str, token.token] {
|
|||
|
||||
keywords.insert("type", token.TYPE);
|
||||
keywords.insert("check", token.CHECK);
|
||||
keywords.insert("assert", token.ASSERT);
|
||||
keywords.insert("claim", token.CLAIM);
|
||||
keywords.insert("prove", token.PROVE);
|
||||
|
||||
|
|
@@ -528,7 +529,7 @@ fn scan_numeric_escape(reader rdr) -> char {
|
|||
|
||||
auto n_hex_digits = 0;
|
||||
|
||||
check (rdr.curr() == '\\');
|
||||
assert (rdr.curr() == '\\');
|
||||
|
||||
alt (rdr.next()) {
|
||||
case ('x') { n_hex_digits = 2; }
|
||||
|
|
|
|||
|
|
@@ -621,7 +621,7 @@ fn parse_path(parser p, greed g) -> ast.path {
|
|||
if (p.peek() == token.DOT) {
|
||||
if (g == GREEDY) {
|
||||
p.bump();
|
||||
check (is_ident(p.peek()));
|
||||
assert (is_ident(p.peek()));
|
||||
} else {
|
||||
more = false;
|
||||
}
|
||||
|
|
@@ -816,19 +816,22 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
|||
ex = ast.expr_log(0, e, ast.ann_none);
|
||||
}
|
||||
|
||||
case (token.ASSERT) {
|
||||
p.bump();
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_assert(e, ast.ann_none);
|
||||
}
|
||||
|
||||
case (token.CHECK) {
|
||||
p.bump();
|
||||
alt (p.peek()) {
|
||||
case (token.LPAREN) {
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_check_expr(e, ast.ann_none);
|
||||
}
|
||||
case (_) {
|
||||
p.get_session().unimpl("constraint-check stmt");
|
||||
}
|
||||
}
|
||||
}
|
||||
/* Should be a predicate (pure boolean function) applied to
|
||||
arguments that are all either slot variables or literals.
|
||||
but the typechecker enforces that. */
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_check(e, ast.ann_none);
|
||||
}
|
||||
|
||||
case (token.RET) {
|
||||
p.bump();
|
||||
|
|
@@ -937,7 +940,7 @@ fn expand_syntax_ext(parser p, ast.span sp,
|
|||
&ast.path path, vec[@ast.expr] args,
|
||||
option.t[str] body) -> ast.expr_ {
|
||||
|
||||
check (_vec.len[ast.ident](path.node.idents) > 0u);
|
||||
assert (_vec.len[ast.ident](path.node.idents) > 0u);
|
||||
auto extname = path.node.idents.(0);
|
||||
if (_str.eq(extname, "fmt")) {
|
||||
auto expanded = extfmt.expand_syntax_ext(args, body);
|
||||
|
|
@@ -1673,7 +1676,8 @@ fn stmt_ends_with_semi(@ast.stmt stmt) -> bool {
|
|||
case (ast.expr_put(_,_)) { ret true; }
|
||||
case (ast.expr_be(_,_)) { ret true; }
|
||||
case (ast.expr_log(_,_,_)) { ret true; }
|
||||
case (ast.expr_check_expr(_,_)) { ret true; }
|
||||
case (ast.expr_check(_,_)) { ret true; }
|
||||
case (ast.expr_assert(_,_)) { ret true; }
|
||||
}
|
||||
}
|
||||
// We should not be calling this on a cdir.
|
||||
|
|
@@ -2157,24 +2161,24 @@ fn parse_item(parser p) -> @ast.item {
|
|||
|
||||
alt (p.peek()) {
|
||||
case (token.CONST) {
|
||||
check (lyr == ast.layer_value);
|
||||
assert (lyr == ast.layer_value);
|
||||
ret parse_item_const(p);
|
||||
}
|
||||
|
||||
case (token.FN) {
|
||||
check (lyr == ast.layer_value);
|
||||
assert (lyr == ast.layer_value);
|
||||
ret parse_item_fn_or_iter(p);
|
||||
}
|
||||
case (token.ITER) {
|
||||
check (lyr == ast.layer_value);
|
||||
assert (lyr == ast.layer_value);
|
||||
ret parse_item_fn_or_iter(p);
|
||||
}
|
||||
case (token.MOD) {
|
||||
check (lyr == ast.layer_value);
|
||||
assert (lyr == ast.layer_value);
|
||||
ret parse_item_mod(p);
|
||||
}
|
||||
case (token.NATIVE) {
|
||||
check (lyr == ast.layer_value);
|
||||
assert (lyr == ast.layer_value);
|
||||
ret parse_item_native_mod(p);
|
||||
}
|
||||
case (token.TYPE) {
|
||||
|
|
|
|||
|
|
@@ -89,6 +89,7 @@ tag token {
|
|||
|
||||
/* Type and type-state keywords */
|
||||
TYPE;
|
||||
ASSERT;
|
||||
CHECK;
|
||||
CLAIM;
|
||||
PROVE;
|
||||
|
|
@@ -258,6 +259,7 @@ fn to_str(token t) -> str {
|
|||
|
||||
/* Type and type-state keywords */
|
||||
case (TYPE) { ret "type"; }
|
||||
case (ASSERT) { ret "assert"; }
|
||||
case (CHECK) { ret "check"; }
|
||||
case (CLAIM) { ret "claim"; }
|
||||
case (PROVE) { ret "prove"; }
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue