Rename TokenTree variants for clarity
This should be clearer and fits in better with the `TTNonterminal` variant. Renames: `TTTok` -> `TTToken`, `TTDelim` -> `TTDelimited`, `TTSeq` -> `TTSequence`.
This commit is contained in:
parent
971d776aa5
commit
ec3f0201e7
14 changed files with 98 additions and 95 deletions
|
|
@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
|
|||
cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
|
||||
} else {
|
||||
match tts[0] {
|
||||
ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
|
||||
ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
|
||||
ast::TTToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
|
||||
ast::TTToken(_, token::LIT_STR_RAW(ident, _)) => {
|
||||
return Some(parse::raw_str_lit(ident.as_str()))
|
||||
}
|
||||
_ => {
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
|
|||
for (i, e) in tts.iter().enumerate() {
|
||||
if i & 1 == 1 {
|
||||
match *e {
|
||||
ast::TTTok(_, token::COMMA) => (),
|
||||
ast::TTToken(_, token::COMMA) => (),
|
||||
_ => {
|
||||
cx.span_err(sp, "concat_idents! expecting comma.");
|
||||
return DummyResult::expr(sp);
|
||||
|
|
@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
|
|||
}
|
||||
} else {
|
||||
match *e {
|
||||
ast::TTTok(_, token::IDENT(ident,_)) => {
|
||||
ast::TTToken(_, token::IDENT(ident,_)) => {
|
||||
res_str.push_str(token::get_ident(ident).get())
|
||||
}
|
||||
_ => {
|
||||
|
|
|
|||
|
|
@ -639,10 +639,10 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
|
|||
|
||||
fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
|
||||
match *tt {
|
||||
ast::TTTok(sp, ref tok) => {
|
||||
ast::TTToken(sp, ref tok) => {
|
||||
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
|
||||
let e_tok = cx.expr_call(sp,
|
||||
mk_ast_path(cx, sp, "TTTok"),
|
||||
mk_ast_path(cx, sp, "TTToken"),
|
||||
vec!(e_sp, mk_token(cx, sp, tok)));
|
||||
let e_push =
|
||||
cx.expr_method_call(sp,
|
||||
|
|
@ -651,14 +651,14 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
|
|||
vec!(e_tok));
|
||||
vec!(cx.stmt_expr(e_push))
|
||||
},
|
||||
ast::TTDelim(sp, ref open, ref tts, ref close) => {
|
||||
ast::TTDelimited(sp, ref open, ref tts, ref close) => {
|
||||
let mut stmts = vec![];
|
||||
stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter());
|
||||
stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()));
|
||||
stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter());
|
||||
stmts
|
||||
},
|
||||
ast::TTSeq(..) => fail!("TTSeq in quote!"),
|
||||
ast::TTSequence(..) => fail!("TTSequence in quote!"),
|
||||
ast::TTNonterminal(sp, ident) => {
|
||||
// tt.extend($ident.to_tokens(ext_cx).into_iter())
|
||||
|
||||
|
|
|
|||
|
|
@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
|
|||
tt: &[ast::TokenTree])
|
||||
-> Box<base::MacResult+'static> {
|
||||
match tt {
|
||||
[ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => {
|
||||
[ast::TTToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
|
||||
cx.set_trace_macros(true);
|
||||
}
|
||||
[ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => {
|
||||
[ast::TTToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
|
||||
cx.set_trace_macros(false);
|
||||
}
|
||||
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
|
||||
use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelimited};
|
||||
use ast;
|
||||
use codemap::{Span, Spanned, DUMMY_SP};
|
||||
use ext::base::{ExtCtxt, MacResult, MacroDef};
|
||||
|
|
@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
|
|||
MatchedNonterminal(NtTT(ref tt)) => {
|
||||
match **tt {
|
||||
// ignore delimiters
|
||||
TTDelim(_, _, ref tts, _) => (**tts).clone(),
|
||||
TTDelimited(_, _, ref tts, _) => (**tts).clone(),
|
||||
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
|
||||
}
|
||||
},
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@
|
|||
// except according to those terms.
|
||||
|
||||
use ast;
|
||||
use ast::{TokenTree, TTDelim, TTTok, TTSeq, TTNonterminal, Ident};
|
||||
use ast::{TokenTree, TTDelimited, TTToken, TTSequence, TTNonterminal, Ident};
|
||||
use codemap::{Span, DUMMY_SP};
|
||||
use diagnostic::SpanHandler;
|
||||
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
|
||||
|
|
@ -45,7 +45,7 @@ pub struct TtReader<'a> {
|
|||
}
|
||||
|
||||
/// This can do Macro-By-Example transcription. On the other hand, if
|
||||
/// `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
|
||||
/// `src` contains no `TTSequence`s and `TTNonterminal`s, `interp` can (and
|
||||
/// should) be none.
|
||||
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
|
||||
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
|
||||
|
|
@ -130,12 +130,12 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
|
|||
match *t {
|
||||
// The opening and closing delimiters are both tokens, so they are
|
||||
// treated as `LisUnconstrained`.
|
||||
TTDelim(_, _, ref tts, _) | TTSeq(_, ref tts, _, _) => {
|
||||
TTDelimited(_, _, ref tts, _) | TTSequence(_, ref tts, _, _) => {
|
||||
tts.iter().fold(LisUnconstrained, |size, tt| {
|
||||
size + lockstep_iter_size(tt, r)
|
||||
})
|
||||
},
|
||||
TTTok(..) => LisUnconstrained,
|
||||
TTToken(..) => LisUnconstrained,
|
||||
TTNonterminal(_, name) => match *lookup_cur_matched(r, name) {
|
||||
MatchedNonterminal(_) => LisUnconstrained,
|
||||
MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
|
||||
|
|
@ -194,15 +194,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
|||
}
|
||||
}
|
||||
}
|
||||
loop { /* because it's easiest, this handles `TTDelim` not starting
|
||||
with a `TTTok`, even though it won't happen */
|
||||
loop { /* because it's easiest, this handles `TTDelimited` not starting
|
||||
with a `TTToken`, even though it won't happen */
|
||||
let t = {
|
||||
let frame = r.stack.last().unwrap();
|
||||
// FIXME(pcwalton): Bad copy.
|
||||
(*frame.forest)[frame.idx].clone()
|
||||
};
|
||||
match t {
|
||||
TTDelim(_, open, delimed_tts, close) => {
|
||||
TTDelimited(_, open, delimed_tts, close) => {
|
||||
let mut tts = vec![];
|
||||
tts.push(open.to_tt());
|
||||
tts.extend(delimed_tts.iter().map(|x| (*x).clone()));
|
||||
|
|
@ -216,15 +216,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
|||
});
|
||||
// if this could be 0-length, we'd need to potentially recur here
|
||||
}
|
||||
TTTok(sp, tok) => {
|
||||
TTToken(sp, tok) => {
|
||||
r.cur_span = sp;
|
||||
r.cur_tok = tok;
|
||||
r.stack.last_mut().unwrap().idx += 1;
|
||||
return ret_val;
|
||||
}
|
||||
TTSeq(sp, tts, sep, zerok) => {
|
||||
TTSequence(sp, tts, sep, zerok) => {
|
||||
// FIXME(pcwalton): Bad copy.
|
||||
match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) {
|
||||
match lockstep_iter_size(&TTSequence(sp, tts.clone(), sep.clone(), zerok), r) {
|
||||
LisUnconstrained => {
|
||||
r.sp_diag.span_fatal(
|
||||
sp.clone(), /* blame macro writer */
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue