Auto merge of #36154 - nrc:proc-macro-init, r=@jseyfried

Adds a `ProcMacro` form of syntax extension

This commit adds syntax extension forms matching the types for procedural macros 2.0 (RFC #1566). These still require the usual syntax extension boilerplate, but this is a first step towards a proper implementation and should be useful for the macros 1.1 work too.

Supports both attribute-like and function-like macros.

Note that RFC #1566 has not been accepted yet, but I think there is consensus that we want to head in vaguely that direction and so this PR will be useful in any case. It is also fairly easy to undo and does not break any existing programs.

This is related to #35957 in that I hope it can be used in the implementation of macros 1.1; however, there is no direct overlap, and this is more of a complement than a competing proposal. There is still a fair bit of work to do before the two can be combined.

r? @jseyfried

cc @alexcrichton, @cgswords, @eddyb, @aturon
This commit is contained in:
bors 2016-09-22 16:33:41 -07:00 committed by GitHub
commit 3a5d975fdc
12 changed files with 472 additions and 49 deletions

View file

@ -15,8 +15,7 @@ use rustc::session::Session;
use rustc::mir::transform::MirMapPass;
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT};
use syntax::ext::base::{IdentTT, MultiModifier, MultiDecorator};
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn;
use syntax::parse::token;
use syntax::ast;
@ -109,8 +108,7 @@ impl<'a> Registry<'a> {
IdentTT(ext, _, allow_internal_unstable) => {
IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
}
MultiDecorator(ext) => MultiDecorator(ext),
MultiModifier(ext) => MultiModifier(ext),
_ => extension,
}));
}

View file

@ -73,7 +73,9 @@ impl<'a> base::Resolver for Resolver<'a> {
let name = intern(&attrs[i].name());
match self.expansion_data[0].module.macros.borrow().get(&name) {
Some(ext) => match **ext {
MultiModifier(..) | MultiDecorator(..) => return Some(attrs.remove(i)),
MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
return Some(attrs.remove(i))
}
_ => {}
},
None => {}

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::SyntaxExtension::*;
pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT};
use ast::{self, Attribute, Name, PatKind};
use attr::HasAttrs;
@ -18,8 +18,9 @@ use errors::DiagnosticBuilder;
use ext::expand::{self, Invocation, Expansion};
use ext::hygiene::Mark;
use ext::tt::macro_rules;
use fold;
use parse;
use parse::parser;
use parse::parser::{self, Parser};
use parse::token;
use parse::token::{InternedString, str_to_ident};
use ptr::P;
@ -31,7 +32,8 @@ use feature_gate;
use std::collections::HashMap;
use std::path::PathBuf;
use std::rc::Rc;
use tokenstream;
use std::default::Default;
use tokenstream::{self, TokenStream};
#[derive(Debug,Clone)]
@ -146,6 +148,190 @@ impl Into<Vec<Annotatable>> for Annotatable {
}
}
/// A function-like procedural macro (`foo!(...)`): maps the invocation's
/// token stream to the token stream that replaces the invocation.
pub trait ProcMacro {
/// Expand `ts` (the tokens inside the macro invocation at `span`) into
/// replacement tokens, with access to the expansion context `ecx`.
fn expand<'cx>(&self,
ecx: &'cx mut ExtCtxt,
span: Span,
ts: TokenStream)
-> TokenStream;
}
/// Any plain `Fn(TokenStream) -> TokenStream` can serve as a `ProcMacro`;
/// such functions ignore the expansion context and span.
impl<F> ProcMacro for F
where F: Fn(TokenStream) -> TokenStream
{
fn expand<'cx>(&self,
_ecx: &'cx mut ExtCtxt,
_span: Span,
ts: TokenStream)
-> TokenStream {
// FIXME setup implicit context in TLS before calling self.
(*self)(ts)
}
}
/// An attribute-like procedural macro (`#[foo(...)]`): maps the attribute's
/// tokens plus the annotated item's tokens to a replacement token stream.
pub trait AttrProcMacro {
/// Expand the item. `annotation` holds the attribute's tokens and
/// `annotated` holds the tokens of the item it is attached to.
fn expand<'cx>(&self,
ecx: &'cx mut ExtCtxt,
span: Span,
annotation: TokenStream,
annotated: TokenStream)
-> TokenStream;
}
/// Any plain `Fn(TokenStream, TokenStream) -> TokenStream` can serve as an
/// `AttrProcMacro`; such functions ignore the expansion context and span.
impl<F> AttrProcMacro for F
where F: Fn(TokenStream, TokenStream) -> TokenStream
{
fn expand<'cx>(&self,
_ecx: &'cx mut ExtCtxt,
_span: Span,
annotation: TokenStream,
annotated: TokenStream)
-> TokenStream {
// FIXME setup implicit context in TLS before calling self.
(*self)(annotation, annotated)
}
}
/// The result of a procedural macro expansion: a parser positioned over the
/// macro's output tokens, plus the span of the invocation (used to point
/// diagnostics back at the macro use site).
pub struct TokResult<'a> {
pub parser: Parser<'a>,
pub span: Span,
}
impl<'a> TokResult<'a> {
// There is quite a lot of overlap here with ParserAnyMacro in ext/tt/macro_rules.rs
// We could probably share more code.
// FIXME(#36641) Unify TokResult and ParserAnyMacro.
/// Error if the parser has not consumed the whole macro output.
/// `allow_semi` lets the underlying check tolerate one trailing `;`
/// (used when the output was parsed as an expression).
fn ensure_complete_parse(&mut self, allow_semi: bool) {
let macro_span = &self.span;
self.parser.ensure_complete_parse(allow_semi, |parser| {
let token_str = parser.this_token_to_string();
let msg = format!("macro expansion ignores token `{}` and any following", token_str);
let span = parser.span;
parser.diagnostic()
.struct_span_err(span, &msg)
// Point back at the invocation so the user can see which macro
// produced the leftover tokens.
.span_note(*macro_span, "caused by the macro expansion here")
.emit();
});
}
}
/// Converts a procedural macro's token output back into AST fragments by
/// re-parsing in whichever syntactic position (item, expression, pattern,
/// statement, type, ...) the macro was invoked.
impl<'a> MacResult for TokResult<'a> {
fn make_items(mut self: Box<Self>) -> Option<SmallVector<P<ast::Item>>> {
// If errors were already reported, return an empty result rather than
// re-parsing (likely bogus) output and cascading more errors.
if self.parser.sess.span_diagnostic.has_errors() {
return Some(SmallVector::zero());
}
let mut items = SmallVector::zero();
loop {
match self.parser.parse_item() {
Ok(Some(item)) => items.push(item),
// No more items: verify nothing trails the last one.
Ok(None) => {
self.ensure_complete_parse(false);
return Some(items);
}
Err(mut e) => {
e.emit();
return Some(SmallVector::zero());
}
}
}
}
fn make_impl_items(mut self: Box<Self>) -> Option<SmallVector<ast::ImplItem>> {
let mut items = SmallVector::zero();
loop {
// parse_impl_item has no "no more items" return value, so the loop
// must check for end-of-input explicitly.
if self.parser.token == token::Eof {
break;
}
match self.parser.parse_impl_item() {
Ok(item) => items.push(item),
Err(mut e) => {
e.emit();
return Some(SmallVector::zero());
}
}
}
self.ensure_complete_parse(false);
Some(items)
}
fn make_trait_items(mut self: Box<Self>) -> Option<SmallVector<ast::TraitItem>> {
let mut items = SmallVector::zero();
loop {
// As above: stop explicitly at Eof.
if self.parser.token == token::Eof {
break;
}
match self.parser.parse_trait_item() {
Ok(item) => items.push(item),
Err(mut e) => {
e.emit();
return Some(SmallVector::zero());
}
}
}
self.ensure_complete_parse(false);
Some(items)
}
fn make_expr(mut self: Box<Self>) -> Option<P<ast::Expr>> {
match self.parser.parse_expr() {
Ok(e) => {
// `true`: a trailing semicolon after the expression is allowed.
self.ensure_complete_parse(true);
Some(e)
}
Err(mut e) => {
e.emit();
// Return a dummy expression so expansion can continue and
// report further errors instead of aborting.
Some(DummyResult::raw_expr(self.span))
}
}
}
fn make_pat(mut self: Box<Self>) -> Option<P<ast::Pat>> {
match self.parser.parse_pat() {
Ok(e) => {
self.ensure_complete_parse(false);
Some(e)
}
Err(mut e) => {
e.emit();
Some(P(DummyResult::raw_pat(self.span)))
}
}
}
fn make_stmts(mut self: Box<Self>) -> Option<SmallVector<ast::Stmt>> {
let mut stmts = SmallVector::zero();
loop {
if self.parser.token == token::Eof {
break;
}
// `false`: no legacy macro-expansion warnings for proc-macro output.
match self.parser.parse_full_stmt(false) {
Ok(Some(stmt)) => stmts.push(stmt),
Ok(None) => { /* continue */ }
Err(mut e) => {
e.emit();
return Some(SmallVector::zero());
}
}
}
self.ensure_complete_parse(false);
Some(stmts)
}
fn make_ty(mut self: Box<Self>) -> Option<P<ast::Ty>> {
match self.parser.parse_ty() {
Ok(e) => {
self.ensure_complete_parse(false);
Some(e)
}
Err(mut e) => {
e.emit();
Some(DummyResult::raw_ty(self.span))
}
}
}
}
/// Represents a thing that maps token trees to Macro Results
pub trait TTMacroExpander {
fn expand<'cx>(&self,
@ -439,11 +625,22 @@ pub enum SyntaxExtension {
/// based upon it.
///
/// `#[derive(...)]` is a `MultiItemDecorator`.
MultiDecorator(Box<MultiItemDecorator + 'static>),
///
/// Prefer ProcMacro or MultiModifier since they are more flexible.
MultiDecorator(Box<MultiItemDecorator>),
/// A syntax extension that is attached to an item and modifies it
/// in-place. More flexible version than Modifier.
MultiModifier(Box<MultiItemModifier + 'static>),
/// in-place. Also allows decoration, i.e., creating new items.
MultiModifier(Box<MultiItemModifier>),
/// A function-like procedural macro. TokenStream -> TokenStream.
ProcMacro(Box<ProcMacro>),
/// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream.
/// The first TokenStream is the attribute, the second is the annotated item.
/// Allows modification of the input items and adding new items, similar to
/// MultiModifier, but uses TokenStreams, rather than AST nodes.
AttrProcMacro(Box<AttrProcMacro>),
/// A normal, function-like syntax extension.
///
@ -451,12 +648,12 @@ pub enum SyntaxExtension {
///
/// The `bool` dictates whether the contents of the macro can
/// directly use `#[unstable]` things (true == yes).
NormalTT(Box<TTMacroExpander + 'static>, Option<Span>, bool),
NormalTT(Box<TTMacroExpander>, Option<Span>, bool),
/// A function-like syntax extension that has an extra ident before
/// the block.
///
IdentTT(Box<IdentMacroExpander + 'static>, Option<Span>, bool),
IdentTT(Box<IdentMacroExpander>, Option<Span>, bool),
}
pub type NamedSyntaxExtension = (Name, SyntaxExtension);
@ -817,3 +1014,17 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
}
Some(es)
}
/// AST folder that rewrites every span it visits to one fixed span —
/// used to make expanded code point back at a single location
/// (e.g. the macro invocation site).
pub struct ChangeSpan {
pub span: Span
}
impl Folder for ChangeSpan {
// Every span encountered during folding is replaced by the stored span.
fn new_span(&mut self, _sp: Span) -> Span {
self.span
}
// Delegate to the no-op macro fold so folding recurses into (rather than
// rejecting) unexpanded macro invocations.
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
}
}

View file

@ -21,9 +21,12 @@ use ext::base::*;
use feature_gate::{self, Features};
use fold;
use fold::*;
use parse::{ParseSess, lexer};
use parse::parser::Parser;
use parse::token::{intern, keywords};
use print::pprust;
use ptr::P;
use tokenstream::TokenTree;
use tokenstream::{TokenTree, TokenStream};
use util::small_vector::SmallVector;
use visit::Visitor;
@ -315,6 +318,20 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
items.push(item);
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
let attr_toks = TokenStream::from_tts(tts_for_attr(&attr, &self.cx.parse_sess));
let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
let parser = self.cx.new_parser_from_tts(&tok_result.to_tts());
let result = Box::new(TokResult { parser: parser, span: attr.span });
kind.make_from(result).unwrap_or_else(|| {
let msg = format!("macro could not be expanded into {} position", kind.name());
self.cx.span_err(attr.span, &msg);
kind.dummy(attr.span)
})
}
_ => unreachable!(),
}
}
@ -384,11 +401,41 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
kind.make_from(expander.expand(self.cx, span, ident, marked_tts, attrs))
}
MultiDecorator(..) | MultiModifier(..) => {
MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
self.cx.span_err(path.span,
&format!("`{}` can only be used in attributes", extname));
return kind.dummy(span);
}
SyntaxExtension::ProcMacro(ref expandfun) => {
if ident.name != keywords::Invalid.name() {
let msg =
format!("macro {}! expects no ident argument, given '{}'", extname, ident);
self.cx.span_err(path.span, &msg);
return kind.dummy(span);
}
self.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
format: MacroBang(extname),
// FIXME procedural macros do not have proper span info
// yet, when they do, we should use it here.
span: None,
// FIXME probably want to follow macro_rules macros here.
allow_internal_unstable: false,
},
});
let tok_result = expandfun.expand(self.cx,
span,
TokenStream::from_tts(marked_tts));
let parser = self.cx.new_parser_from_tts(&tok_result.to_tts());
let result = Box::new(TokResult { parser: parser, span: span });
// FIXME better span info.
kind.make_from(result).map(|i| i.fold_with(&mut ChangeSpan { span: span }))
}
};
let expanded = if let Some(expanded) = opt_expanded {
@ -460,6 +507,36 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
}
}
// These are pretty nasty. Ideally, we would keep the tokens around, linked from
// the AST. However, we don't so we need to create new ones. Since the item might
// have come from a macro expansion (possibly only in part), we can't use the
// existing codemap.
//
// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
// string, which we then re-tokenise (double yuck), but first we have to patch
// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
/// Convert an annotatable AST node into token trees. Since the original
/// tokens are not kept in the AST, the node is pretty-printed to a string
/// and that string is re-lexed.
fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
let text = match *item {
Annotatable::Item(ref i) => pprust::item_to_string(i),
Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
};
string_to_tts(text, parse_sess)
}
/// Convert an attribute into token trees by pretty-printing and re-lexing it
/// (same approach, and caveats, as `tts_for_item`).
fn tts_for_attr(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
string_to_tts(pprust::attr_to_string(attr), parse_sess)
}
/// Re-lex `text` into token trees by installing it in the codemap as a
/// synthetic "<macro expansion>" filemap and running the token-tree parser
/// over it.
fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
let filemap = parse_sess.codemap()
.new_filemap(String::from("<macro expansion>"), None, text);
let lexer = lexer::StringReader::new(&parse_sess.span_diagnostic, filemap);
let mut parser = Parser::new(parse_sess, Vec::new(), Box::new(lexer));
// Lexing pretty-printed output is expected to succeed; a failure here is
// a pretty-printer/lexer bug, so panicking (via panictry!) is acceptable.
panictry!(parser.parse_all_token_trees())
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
let mut expr = self.cfg.configure_expr(expr).unwrap();

View file

@ -24,7 +24,9 @@ use ext::base::*;
/// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses
/// the TokenStream as a block and returns it as an `Expr`.
pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStream)
pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
output: TokenStream)
-> Box<MacResult + 'cx> {
let parser = cx.new_parser_from_tts(&output.to_tts());
@ -60,7 +62,7 @@ pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStr
}
pub mod prelude {
pub use ext::proc_macro_shim::build_block_emitter;
pub use super::build_block_emitter;
pub use ast::Ident;
pub use codemap::{DUMMY_SP, Span};
pub use ext::base::{ExtCtxt, MacResult};

View file

@ -49,22 +49,19 @@ impl<'a> ParserAnyMacro<'a> {
/// allowed to be there.
fn ensure_complete_parse(&self, allow_semi: bool, context: &str) {
let mut parser = self.parser.borrow_mut();
if allow_semi && parser.token == token::Semi {
parser.bump();
}
if parser.token != token::Eof {
parser.ensure_complete_parse(allow_semi, |parser| {
let token_str = parser.this_token_to_string();
let msg = format!("macro expansion ignores token `{}` and any \
following",
token_str);
let span = parser.span;
let mut err = parser.diagnostic().struct_span_err(span, &msg[..]);
let mut err = parser.diagnostic().struct_span_err(span, &msg);
let msg = format!("caused by the macro expansion here; the usage \
of `{}!` is likely invalid in {} context",
self.macro_ident, context);
err.span_note(self.site_span, &msg[..])
err.span_note(self.site_span, &msg)
.emit();
}
});
}
}

View file

@ -85,6 +85,12 @@ pub struct StringReader<'a> {
/// The last character to be read
pub curr: Option<char>,
pub filemap: Rc<syntax_pos::FileMap>,
/// If Some, stop reading the source at this position (inclusive).
pub terminator: Option<BytePos>,
/// Whether to record new-lines in filemap. This is only necessary the first
/// time a filemap is lexed. If part of a filemap is being re-lexed, this
/// should be set to false.
pub save_new_lines: bool,
// cached:
pub peek_tok: token::Token,
pub peek_span: Span,
@ -96,7 +102,14 @@ pub struct StringReader<'a> {
impl<'a> Reader for StringReader<'a> {
fn is_eof(&self) -> bool {
self.curr.is_none()
if self.curr.is_none() {
return true;
}
match self.terminator {
Some(t) => self.pos > t,
None => false,
}
}
/// Return the next token. EFFECT: advances the string_reader.
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
@ -164,6 +177,14 @@ impl<'a> StringReader<'a> {
pub fn new_raw<'b>(span_diagnostic: &'b Handler,
filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
let mut sr = StringReader::new_raw_internal(span_diagnostic, filemap);
sr.bump();
sr
}
fn new_raw_internal<'b>(span_diagnostic: &'b Handler,
filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
if filemap.src.is_none() {
span_diagnostic.bug(&format!("Cannot lex filemap \
without source: {}",
@ -172,21 +193,21 @@ impl<'a> StringReader<'a> {
let source_text = (*filemap.src.as_ref().unwrap()).clone();
let mut sr = StringReader {
StringReader {
span_diagnostic: span_diagnostic,
pos: filemap.start_pos,
last_pos: filemap.start_pos,
col: CharPos(0),
curr: Some('\n'),
filemap: filemap,
terminator: None,
save_new_lines: true,
// dummy values; not read
peek_tok: token::Eof,
peek_span: syntax_pos::DUMMY_SP,
source_text: source_text,
fatal_errs: Vec::new(),
};
sr.bump();
sr
}
}
pub fn new<'b>(span_diagnostic: &'b Handler,
@ -405,7 +426,9 @@ impl<'a> StringReader<'a> {
self.curr = Some(ch);
self.col = self.col + CharPos(1);
if last_char == '\n' {
self.filemap.next_line(self.last_pos);
if self.save_new_lines {
self.filemap.next_line(self.last_pos);
}
self.col = CharPos(0);
}

View file

@ -3872,15 +3872,17 @@ impl<'a> Parser<'a> {
}
}
fn parse_stmt_(&mut self, macro_expanded: bool) -> Option<Stmt> {
self.parse_stmt_without_recovery(macro_expanded).unwrap_or_else(|mut e| {
fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt_(SemiColonMode::Break);
None
})
}
fn parse_stmt_without_recovery(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
fn parse_stmt_without_recovery(&mut self,
macro_legacy_warnings: bool)
-> PResult<'a, Option<Stmt>> {
maybe_whole!(Some deref self, NtStmt);
let attrs = self.parse_outer_attributes()?;
@ -3950,7 +3952,7 @@ impl<'a> Parser<'a> {
// We used to incorrectly stop parsing macro-expanded statements here.
// If the next token will be an error anyway but could have parsed with the
// earlier behavior, stop parsing here and emit a warning to avoid breakage.
else if macro_expanded && self.token.can_begin_expr() && match self.token {
else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
// These can continue an expression, so we can't stop parsing and warn.
token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
token::BinOp(token::Minus) | token::BinOp(token::Star) |
@ -4125,8 +4127,8 @@ impl<'a> Parser<'a> {
}
/// Parse a statement, including the trailing semicolon.
pub fn parse_full_stmt(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
let mut stmt = match self.parse_stmt_(macro_expanded) {
pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
let mut stmt = match self.parse_stmt_(macro_legacy_warnings) {
Some(stmt) => stmt,
None => return Ok(None),
};
@ -4146,7 +4148,7 @@ impl<'a> Parser<'a> {
}
StmtKind::Local(..) => {
// We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
if macro_expanded && self.token != token::Semi {
if macro_legacy_warnings && self.token != token::Semi {
self.warn_missing_semicolon();
} else {
self.expect_one_of(&[token::Semi], &[])?;
@ -6169,4 +6171,15 @@ impl<'a> Parser<'a> {
_ => Err(self.fatal("expected string literal"))
}
}
/// Check that the parser has consumed its entire input, invoking `on_err`
/// if any tokens remain. If `allow_semi` is set, a single trailing `;` is
/// consumed before the check (e.g. after parsing a macro expansion as an
/// expression).
pub fn ensure_complete_parse<F>(&mut self, allow_semi: bool, on_err: F)
where F: FnOnce(&Parser)
{
if allow_semi && self.token == token::Semi {
self.bump();
}
if self.token != token::Eof {
on_err(self);
}
}
}

View file

@ -33,6 +33,7 @@ use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::lexer;
use parse;
use parse::token::{self, Token, Lit, Nonterminal};
use print::pprust;
use std::fmt;
use std::iter::*;
@ -781,6 +782,12 @@ impl TokenStream {
}
}
/// Display a token stream by pretty-printing its token trees.
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&pprust::tts_to_string(&self.to_tts()))
}
}
// FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the
// next leaf's iterator when the current one is exhausted.
pub struct Iter<'a> {

View file

@ -15,7 +15,7 @@ use rustc_macro::{TokenStream, __internal};
use syntax::ast::{self, ItemKind};
use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan, Span};
use syntax::ext::base::*;
use syntax::fold::{self, Folder};
use syntax::fold::Folder;
use syntax::parse::token::intern;
use syntax::print::pprust;
@ -97,14 +97,3 @@ impl MultiItemModifier for CustomDerive {
}
}
struct ChangeSpan { span: Span }
impl Folder for ChangeSpan {
fn new_span(&mut self, _sp: Span) -> Span {
self.span
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
}
}

View file

@ -0,0 +1,56 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(plugin, plugin_registrar, rustc_private)]
extern crate proc_macro;
extern crate rustc_plugin;
extern crate syntax;
use proc_macro::prelude::*;
use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
use syntax::ext::proc_macro_shim::prelude::*;
/// Registers the test macros: two attribute-like (`AttrProcMacro`) and
/// three function-like (`ProcMacro`) syntax extensions.
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(token::intern("attr_tru"),
SyntaxExtension::AttrProcMacro(Box::new(attr_tru)));
reg.register_syntax_extension(token::intern("attr_identity"),
SyntaxExtension::AttrProcMacro(Box::new(attr_identity)));
reg.register_syntax_extension(token::intern("tru"),
SyntaxExtension::ProcMacro(Box::new(tru)));
reg.register_syntax_extension(token::intern("ret_tru"),
SyntaxExtension::ProcMacro(Box::new(ret_tru)));
reg.register_syntax_extension(token::intern("identity"),
SyntaxExtension::ProcMacro(Box::new(identity)));
}
// Attribute macro that ignores both the attribute tokens and the annotated
// item, replacing the item with `fn f1() -> bool { true }`.
fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
lex("fn f1() -> bool { true }")
}
// Attribute macro that leaves the annotated item unchanged: the item's
// tokens are printed back to source text and re-lexed.
fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
lex(&item.to_string())
}
// Function-like macro that ignores its input and expands to `true`.
fn tru(_ts: TokenStream) -> TokenStream {
lex("true")
}
// Function-like macro that ignores its input and expands to `return true;`.
fn ret_tru(_ts: TokenStream) -> TokenStream {
lex("return true;")
}
// Function-like macro that expands to exactly its input, round-tripped
// through pretty-printing and re-lexing.
fn identity(ts: TokenStream) -> TokenStream {
lex(&ts.to_string())
}

View file

@ -0,0 +1,48 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:proc_macro_def.rs
// ignore-stage1
// ignore-cross-compile
#![feature(plugin, custom_attribute)]
#![feature(type_macros)]
#![plugin(proc_macro_def)]
// `#[attr_tru]` discards this body entirely and substitutes
// `fn f1() -> bool { true }`, so the `return false` is never compiled in.
#[attr_tru]
fn f1() -> bool {
return false;
}
// `#[attr_identity]` re-emits the item unchanged; `identity!(true)` expands
// to `true`, so f2 returns true.
#[attr_identity]
fn f2() -> bool {
return identity!(true);
}
// Exercises a proc macro in type position (`identity!(bool)` -> `bool`) and
// in statement position (`ret_tru!()` -> `return true;`).
fn f3() -> identity!(bool) {
ret_tru!();
}
// Exercises a proc macro in pattern position: the arms expand to
// `true => false` and `false => true`, i.e. boolean negation.
fn f4(x: bool) -> bool {
match x {
identity!(true) => false,
identity!(false) => true,
}
}
// Each assertion exercises a different expansion position: item
// replacement (f1), item identity (f2), expression (tru!, identity!),
// type + statement (f3), and pattern (f4).
fn main() {
assert!(f1());
assert!(f2());
assert!(tru!());
assert!(f3());
assert!(identity!(5 == 5));
assert!(f4(false));
}