Implement quote! and other proc_macro API.

This commit is contained in:
Jeffrey Seyfried 2017-03-17 23:41:09 +00:00
parent 7d41674b17
commit e42836b208
34 changed files with 1084 additions and 576 deletions

9
src/Cargo.lock generated
View file

@ -882,14 +882,6 @@ name = "proc_macro"
version = "0.0.0"
dependencies = [
"syntax 0.0.0",
]
[[package]]
name = "proc_macro_plugin"
version = "0.0.0"
dependencies = [
"rustc_plugin 0.0.0",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@ -1210,7 +1202,6 @@ dependencies = [
"env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"graphviz 0.0.0",
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro_plugin 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_borrowck 0.0.0",

View file

@ -0,0 +1,7 @@
# `proc_macro`
The tracking issue for this feature is: [#38356]
[#38356]: https://github.com/rust-lang/rust/issues/38356
------------------------

View file

@ -9,3 +9,4 @@ crate-type = ["dylib"]
[dependencies]
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }

View file

@ -37,18 +37,24 @@
test(no_crate_inject, attr(deny(warnings))),
test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
#![feature(i128_type)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(lang_items)]
extern crate syntax;
extern crate syntax_pos;
use std::fmt;
use std::{fmt, iter, ops};
use std::str::FromStr;
use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse;
use syntax::parse::{self, token};
use syntax::symbol;
use syntax::tokenstream;
use syntax_pos::DUMMY_SP;
use syntax_pos::SyntaxContext;
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
@ -60,6 +66,7 @@
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone)]
pub struct TokenStream(tokenstream::TokenStream);
/// Error returned from `TokenStream::from_str`.
@ -69,6 +76,443 @@ pub struct LexError {
_inner: (),
}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
    type Err = LexError;

    /// Parses `src` as Rust tokens inside the current proc-macro expansion
    /// session. The source is registered under a synthetic file name and the
    /// resulting stream inherits the macro invocation's call site.
    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        __internal::with_sess(|(sess, mark)| {
            let src = src.to_string();
            let name = "<proc-macro source code>".to_string();
            let call_site = mark.expn_info().unwrap().call_site;
            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site));
            // NOTE(review): lexing/parsing errors do not surface here as
            // `LexError` — this closure always returns `Ok`; presumably errors
            // are reported through the session — confirm.
            Ok(__internal::token_stream_wrap(stream))
        })
    }
}

#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
    /// Pretty-prints the stream by delegating to the underlying
    /// `tokenstream::TokenStream`'s `Display` implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}
/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
/// For example, `quote!(a + b)` will produce an expression that, when evaluated,
/// constructs the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
///
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
/// To quote `$` itself, use `$$`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[macro_export]
// NOTE(review): the empty expansion appears to be a placeholder; the actual
// expansion is presumably performed by the `Quoter` `ProcMacro` in quote.rs,
// which this crate registers via `__internal` — confirm.
macro_rules! quote { () => {} }
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenTree> for TokenStream {
    /// Wraps a single token tree as a one-element stream.
    fn from(tree: TokenTree) -> TokenStream {
        TokenStream(tree.to_raw())
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenKind> for TokenStream {
    /// Convenience: builds a `TokenTree` with the default span, then a stream.
    fn from(kind: TokenKind) -> TokenStream {
        TokenTree::from(kind).into()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
    /// Concatenates all items (anything convertible to a stream) in order.
    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
        let mut builder = tokenstream::TokenStream::builder();
        for stream in streams {
            builder.push(stream.into().0);
        }
        TokenStream(builder.build())
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenIter;

    /// Iterates over top-level token trees; `next` buffers the second half of
    /// multi-character operators split by `TokenTree::from_raw`.
    fn into_iter(self) -> TokenIter {
        TokenIter { cursor: self.0.trees(), next: None }
    }
}
impl TokenStream {
    /// Returns an empty `TokenStream`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn empty() -> TokenStream {
        TokenStream(tokenstream::TokenStream::empty())
    }

    /// Checks if this `TokenStream` is empty.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

/// A region of source code, along with macro expansion information.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone)]
pub struct Span(syntax_pos::Span);

#[unstable(feature = "proc_macro", issue = "38356")]
impl Default for Span {
    /// The call-site span of the current expansion, re-marked with this
    /// expansion's hygiene context (unlike `call_site`, which keeps the
    /// caller's context).
    fn default() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span {
            ctxt: SyntaxContext::empty().apply_mark(mark),
            ..mark.expn_info().unwrap().call_site
        }))
    }
}

impl Span {
    /// The span of the invocation of the current procedural macro.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn call_site() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
    }
}
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
pub struct TokenTree {
    /// The `TokenTree`'s span
    pub span: Span,
    /// Description of the `TokenTree`
    pub kind: TokenKind,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenKind> for TokenTree {
    /// Attaches the default (re-marked call-site) span to `kind`.
    fn from(kind: TokenKind) -> TokenTree {
        TokenTree { span: Span::default(), kind: kind }
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for TokenTree {
    /// Displays via `TokenStream`'s `Display`; requires a clone because the
    /// conversion consumes the tree.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenStream::from(self.clone()).fmt(f)
    }
}
/// Description of a `TokenTree`
#[derive(Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum TokenKind {
    /// A delimited tokenstream.
    Sequence(Delimiter, TokenStream),
    /// A unicode identifier.
    Word(Symbol),
    /// A punctuation character (`+`, `,`, `$`, etc.).
    Op(char, OpKind),
    /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
    Literal(Literal),
}
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// An implicit delimiter, e.g. `$var`, where $var is `...`.
    None,
}
/// An interned string.
#[derive(Copy, Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Symbol(symbol::Symbol);

#[unstable(feature = "proc_macro", issue = "38356")]
impl<'a> From<&'a str> for Symbol {
    /// Interns `string` in the compiler's symbol table.
    fn from(string: &'a str) -> Symbol {
        Symbol(symbol::Symbol::intern(string))
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl ops::Deref for Symbol {
    type Target = str;
    fn deref(&self) -> &str {
        // SAFETY(review): this pointer round-trip erases the lifetime of the
        // interned string slice. Presumably sound because interned symbols
        // live for the whole compilation session — confirm that invariant.
        unsafe { &*(self.0.as_str().deref() as *const str) }
    }
}
/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
#[derive(Copy, Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum OpKind {
    /// e.g. `+` is `Alone` in `+ =`.
    Alone,
    /// e.g. `+` is `Joint` in `+=`.
    Joint,
}
/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
#[derive(Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Literal(token::Token);

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Literal {
    /// Displays by wrapping the token in a `TokenTree` (with a dummy span)
    /// and reusing its `Display` implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenTree { kind: TokenKind::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
    }
}

/// Generates one suffixed-integer constructor per listed primitive type
/// (e.g. `Literal::u8(7)` producing the token for `7u8`). Each constructor
/// widens the value to `i128` and delegates to `Literal::integer`.
macro_rules! int_literals {
    ($($int_kind:ident),*) => {$(
        /// Integer literal.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $int_kind(n: $int_kind) -> Literal {
            Literal::integer(n as i128, stringify!($int_kind))
        }
    )*}
}
impl Literal {
    int_literals!(u8, i8, u16, i16, u32, i32, u64, i64);

    /// Builds a suffixed integer literal token (e.g. `7u8`) from the value's
    /// decimal rendering plus the suffix named by `kind`.
    fn integer(n: i128, kind: &'static str) -> Literal {
        Literal(token::Literal(token::Lit::Integer(symbol::Symbol::intern(&n.to_string())),
                               Some(symbol::Symbol::intern(kind))))
    }

    /// Floating point literal with an `f32` suffix.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f32(n: f32) -> Literal {
        Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())),
                               Some(symbol::Symbol::intern("f32"))))
    }

    /// Floating point literal with an `f64` suffix.
    ///
    /// Fix: this previously took `n: f32`, so every `f64` argument was
    /// silently narrowed (losing precision) before being rendered as an
    /// `f64`-suffixed literal. It now takes the full-width `f64`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f64(n: f64) -> Literal {
        Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())),
                               Some(symbol::Symbol::intern("f64"))))
    }

    /// String literal. Every character is emitted in `\u{...}` escaped form,
    /// which keeps the interned text trivially valid inside a `"..."` token.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn string(string: &str) -> Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_unicode());
        }
        Literal(token::Literal(token::Lit::Str_(symbol::Symbol::intern(&escaped)), None))
    }

    /// Character literal, stored in `\u{...}` escaped form.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn character(ch: char) -> Literal {
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        Literal(token::Literal(token::Lit::Char(symbol::Symbol::intern(&escaped)), None))
    }
}
/// An iterator over `TokenTree`s.
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct TokenIter {
    // Cursor over the underlying raw token stream.
    cursor: tokenstream::Cursor,
    // Holds the remainder of a multi-character operator that
    // `TokenTree::from_raw` split into two `Op` tokens; drained first.
    next: Option<tokenstream::TokenStream>,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl Iterator for TokenIter {
    type Item = TokenTree;

    /// Yields any buffered split-operator remainder before advancing the
    /// cursor; `from_raw` may refill the buffer for the next call.
    fn next(&mut self) -> Option<TokenTree> {
        self.next.take().or_else(|| self.cursor.next_as_stream())
            .map(|next| TokenTree::from_raw(next, &mut self.next))
    }
}
impl Delimiter {
    /// Maps a raw `syntax` delimiter token onto this crate's public mirror.
    fn from_raw(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::NoDelim => Delimiter::None,
            token::Bracket => Delimiter::Bracket,
            token::Brace => Delimiter::Brace,
            token::Paren => Delimiter::Parenthesis,
        }
    }

    /// Inverse of `from_raw`: maps the public mirror back to the raw token.
    fn to_raw(self) -> token::DelimToken {
        match self {
            Delimiter::None => token::NoDelim,
            Delimiter::Bracket => token::Bracket,
            Delimiter::Brace => token::Brace,
            Delimiter::Parenthesis => token::Paren,
        }
    }
}
impl TokenTree {
    /// Converts one raw token tree into the public `TokenTree`.
    ///
    /// Multi-character operators (`<=`, `==`, `::`, `->`, ...) have no direct
    /// public representation: the first character is returned as a `Joint` op
    /// and the remainder is stashed in `next`, from which `TokenIter::next`
    /// will yield it on the following call.
    fn from_raw(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>)
                -> TokenTree {
        use syntax::parse::token::*;

        let (tree, is_joint) = stream.as_tree();
        let (mut span, token) = match tree {
            tokenstream::TokenTree::Token(span, token) => (span, token),
            // Delimited groups convert directly to `Sequence`; no splitting.
            tokenstream::TokenTree::Delimited(span, delimed) => {
                let delimiter = Delimiter::from_raw(delimed.delim);
                return TokenTree {
                    span: Span(span),
                    kind: TokenKind::Sequence(delimiter, TokenStream(delimed.tts.into())),
                };
            }
        };

        let op_kind = if is_joint { OpKind::Joint } else { OpKind::Alone };
        // Single-character operator, keeping the raw stream's joint-ness.
        macro_rules! op {
            ($op:expr) => { TokenKind::Op($op, op_kind) }
        }

        // Splits a multi-character operator: first char now, rest buffered.
        macro_rules! joint {
            ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) }
        }

        fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span,
                 next: &mut Option<tokenstream::TokenStream>)
                 -> TokenKind {
            // NOTE(review): both halves currently reuse the full span, and the
            // reassignment of `*span` is a no-op as written — presumably a
            // placeholder for real span splitting; confirm intent.
            let (first_span, rest_span) = (*span, *span);
            *span = first_span;
            let tree = tokenstream::TokenTree::Token(rest_span, rest);
            // The stashed remainder keeps the original token's joint-ness.
            *next = Some(if is_joint { tree.joint() } else { tree.into() });
            TokenKind::Op(first, OpKind::Joint)
        }

        let kind = match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => joint!('<', Eq),
            EqEq => joint!('=', Eq),
            Ne => joint!('!', Eq),
            Ge => joint!('>', Eq),
            Gt => op!('>'),
            AndAnd => joint!('&', BinOp(And)),
            OrOr => joint!('|', BinOp(Or)),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => joint!('<', Lt),
            BinOp(Shr) => joint!('>', Gt),
            BinOpEq(Plus) => joint!('+', Eq),
            BinOpEq(Minus) => joint!('-', Eq),
            BinOpEq(Star) => joint!('*', Eq),
            BinOpEq(Slash) => joint!('/', Eq),
            BinOpEq(Percent) => joint!('%', Eq),
            BinOpEq(Caret) => joint!('^', Eq),
            BinOpEq(And) => joint!('&', Eq),
            BinOpEq(Or) => joint!('|', Eq),
            BinOpEq(Shl) => joint!('<', Le),
            BinOpEq(Shr) => joint!('>', Ge),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => joint!('.', Dot),
            DotDotDot => joint!('.', DotDot),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => joint!(':', Colon),
            RArrow => joint!('-', Gt),
            LArrow => joint!('<', BinOp(Minus)),
            FatArrow => joint!('=', Gt),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),
            Underscore => op!('_'),

            // Identifiers and lifetimes both surface as `Word`.
            Ident(ident) | Lifetime(ident) => TokenKind::Word(Symbol(ident.name)),
            Literal(..) | DocComment(..) => TokenKind::Literal(self::Literal(token)),

            Interpolated(..) => unimplemented!(),
            // Delimiters are handled by the `Delimited` arm above; the lexer
            // never emits the remaining trivia tokens into a token stream.
            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        };

        TokenTree { span: Span(span), kind: kind }
    }

    /// Converts this public `TokenTree` back into a raw `syntax` stream.
    /// Inverse of `from_raw` for single-character ops; unsupported punctuation
    /// characters panic.
    fn to_raw(self) -> tokenstream::TokenStream {
        use syntax::parse::token::*;
        use syntax::tokenstream::{TokenTree, Delimited};

        let (op, kind) = match self.kind {
            TokenKind::Op(op, kind) => (op, kind),
            TokenKind::Sequence(delimiter, tokens) => {
                return TokenTree::Delimited(self.span.0, Delimited {
                    delim: delimiter.to_raw(),
                    tts: tokens.0.into(),
                }).into();
            },
            TokenKind::Word(symbol) => {
                let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt };
                // A leading `'` distinguishes lifetimes from identifiers.
                let token =
                    if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
                return TokenTree::Token(self.span.0, token).into();
            }
            TokenKind::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
        };

        let token = match op {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            '_' => Underscore,
            _ => panic!("unsupported character {}", op),
        };

        let tree = TokenTree::Token(self.span.0, token);
        match kind {
            OpKind::Alone => tree.into(),
            OpKind::Joint => tree.joint(),
        }
    }
}
/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
@ -80,7 +524,11 @@ pub struct LexError {
/// all of the contents.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
#[path = ""]
pub mod __internal {
mod quote;
pub use self::quote::{Quoter, __rt};
use std::cell::Cell;
use std::rc::Rc;
@ -172,25 +620,3 @@ fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
err.cancel();
LexError { _inner: () }
}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
__internal::with_sess(|(sess, mark)| {
let src = src.to_string();
let name = "<proc-macro source code>".to_string();
let call_site = mark.expn_info().unwrap().call_site;
let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site));
Ok(__internal::token_stream_wrap(stream))
})
}
}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}

259
src/libproc_macro/quote.rs Normal file
View file

@ -0,0 +1,259 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Quasiquoter
//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
use syntax::ast::Ident;
use syntax::ext::base::{ExtCtxt, ProcMacro};
use syntax::parse::token::{self, Token, Lit};
use syntax::symbol::Symbol;
use syntax::tokenstream::{Delimited, TokenTree, TokenStream};
use syntax_pos::{DUMMY_SP, Span};
use syntax_pos::hygiene::SyntaxContext;
/// Zero-sized `ProcMacro` implementor backing the `quote!` macro.
pub struct Quoter;

/// Runtime support re-exports referenced by code that `quote!` generates
/// (paths of the form `rt::...` in the quoted output).
pub mod __rt {
    pub use syntax::ast::Ident;
    pub use syntax::parse::token;
    pub use syntax::symbol::Symbol;
    pub use syntax::tokenstream::{TokenStream, TokenTree, Delimited};

    pub use super::{ctxt, span};

    /// Unwraps an unquoted (`$x`) value into the raw token stream the
    /// generated builder code operates on.
    pub fn unquote<T: Into<::TokenStream> + Clone>(tokens: &T) -> TokenStream {
        T::into(tokens.clone()).0
    }
}

/// The hygiene context of the current expansion.
pub fn ctxt() -> SyntaxContext {
    ::__internal::with_sess(|(_, mark)| SyntaxContext::empty().apply_mark(mark))
}

/// The default span for generated tokens (re-marked call site).
pub fn span() -> Span {
    ::Span::default().0
}
/// Types that can render themselves as a token stream which, when compiled
/// and run, reconstructs an equal value.
trait Quote {
    fn quote(&self) -> TokenStream;
}

// Maps the small set of punctuation/ident tokens the quoter itself emits onto
// raw `Token` values. Idents are created with the current expansion's ctxt.
macro_rules! quote_tok {
    (,) => { Token::Comma };
    (.) => { Token::Dot };
    (:) => { Token::Colon };
    (::) => { Token::ModSep };
    (!) => { Token::Not };
    (<) => { Token::Lt };
    (>) => { Token::Gt };
    (_) => { Token::Underscore };
    (0) => { Token::Literal(token::Lit::Integer(Symbol::intern("0")), None) };
    (&) => { Token::BinOp(token::And) };
    ($i:ident) => { Token::Ident(Ident { name: Symbol::intern(stringify!($i)), ctxt: ctxt() }) };
}

// One token tree of internal quote!-input: `(unquote e)` splices `e`'s stream,
// `(quote e)` splices `e.quote()`, bracketed groups recurse with the matching
// delimiter, `rt` abbreviates the runtime-support path, anything else is a
// plain token.
macro_rules! quote_tree {
    ((unquote $($t:tt)*)) => { TokenStream::from($($t)*) };
    ((quote $($t:tt)*)) => { ($($t)*).quote() };
    (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
    ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
    ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
    (rt) => { quote!(::__internal::__rt) };
    ($t:tt) => { TokenStream::from(TokenTree::Token(span(), quote_tok!($t))) };
}

/// Wraps `stream` in the given delimiter as a single delimited tree.
fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
    TokenTree::Delimited(span(), Delimited { delim: delim, tts: stream.into() }).into()
}

// Internal (crate-local) quote!: concatenates the per-tree streams.
macro_rules! quote {
    () => { TokenStream::empty() };
    ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
}
impl ProcMacro for Quoter {
    /// Expands the user-facing `quote!` invocation: quotes the input stream
    /// and wraps the result in the public `::TokenStream` constructor.
    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, _: Span, stream: TokenStream) -> TokenStream {
        // Allow the generated code to reference unstable internals without
        // requiring feature gates in the user's crate.
        let mut info = cx.current_expansion.mark.expn_info().unwrap();
        info.callee.allow_internal_unstable = true;
        cx.current_expansion.mark.set_expn_info(info);
        ::__internal::set_sess(cx, || quote!(::TokenStream((quote stream))))
    }
}

impl<T: Quote> Quote for Option<T> {
    /// Quotes as a literal `Some(...)`/`None` expression.
    fn quote(&self) -> TokenStream {
        match *self {
            Some(ref t) => quote!(Some((quote t))),
            None => quote!(None),
        }
    }
}
impl Quote for TokenStream {
    /// Quotes a stream as builder code:
    /// `rt::TokenStream::builder().add(..)...build()`, handling the `$`
    /// unquote syntax along the way.
    fn quote(&self) -> TokenStream {
        let mut builder = TokenStream::builder();
        builder.push(quote!(rt::TokenStream::builder()));

        let mut trees = self.trees();
        loop {
            let (mut tree, mut is_joint) = match trees.next_as_stream() {
                Some(next) => next.as_tree(),
                // End of input: close the builder chain.
                None => return builder.add(quote!(.build())).build(),
            };
            if let TokenTree::Token(_, Token::Dollar) = tree {
                // `$` introduces an unquote; look at the following tree.
                let (next_tree, next_is_joint) = match trees.next_as_stream() {
                    Some(next) => next.as_tree(),
                    None => panic!("unexpected trailing `$` in `quote!`"),
                };
                match next_tree {
                    // `$ident`: splice the named value's stream at runtime.
                    TokenTree::Token(_, Token::Ident(..)) => {
                        builder.push(quote!(.add(rt::unquote(&(unquote next_tree)))));
                        continue
                    }
                    // `$$`: emit a literal `$` token.
                    TokenTree::Token(_, Token::Dollar) => {
                        tree = next_tree;
                        is_joint = next_is_joint;
                    }
                    _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
                }
            }

            // Ordinary tree: quote it, preserving joint-ness.
            builder.push(match is_joint {
                true => quote!(.add((quote tree).joint())),
                false => quote!(.add(rt::TokenStream::from((quote tree)))),
            });
        }
    }
}
impl Quote for TokenTree {
    /// Quotes as a `rt::TokenTree::{Token,Delimited}` constructor expression.
    fn quote(&self) -> TokenStream {
        match *self {
            TokenTree::Token(span, ref token) => quote! {
                rt::TokenTree::Token((quote span), (quote token))
            },
            TokenTree::Delimited(span, ref delimited) => quote! {
                rt::TokenTree::Delimited((quote span), (quote delimited))
            },
        }
    }
}

impl Quote for Delimited {
    /// Quotes as a `rt::Delimited { .. }` struct expression.
    fn quote(&self) -> TokenStream {
        quote!(rt::Delimited { delim: (quote self.delim), tts: (quote self.stream()).into() })
    }
}

impl<'a> Quote for &'a str {
    /// Quotes as a string literal token.
    fn quote(&self) -> TokenStream {
        TokenTree::Token(span(), Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
            .into()
    }
}

impl Quote for usize {
    /// Quotes as an unsuffixed integer literal token.
    // NOTE(review): uses DUMMY_SP where sibling impls use span() — presumably
    // intentional for positionless numeric literals; confirm.
    fn quote(&self) -> TokenStream {
        let integer_symbol = Symbol::intern(&self.to_string());
        TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
            .into()
    }
}

impl Quote for Ident {
    /// Quotes as an `rt::Ident` constructed with the current expansion ctxt.
    fn quote(&self) -> TokenStream {
        quote!(rt::Ident { name: (quote self.name), ctxt: rt::ctxt() })
    }
}

impl Quote for Symbol {
    /// Quotes as a call that re-interns the symbol's text at runtime.
    fn quote(&self) -> TokenStream {
        quote!(rt::Symbol::intern((quote &*self.as_str())))
    }
}

impl Quote for Span {
    /// Spans are not serialized; quoting yields the default runtime span.
    fn quote(&self) -> TokenStream {
        quote!(rt::span())
    }
}
impl Quote for Token {
    /// Quotes a raw token as the matching `rt::token::*` constructor
    /// expression. Unit variants are generated by the macro; payload-carrying
    /// variants are listed explicitly.
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*; $($t:tt)*) => {
                match *self {
                    $( Token::$i => quote!(rt::token::$i), )*
                    $( $t )*
                }
            }
        }

        gen_match! {
            Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
            Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question,
            Underscore;

            Token::OpenDelim(delim) => quote!(rt::token::OpenDelim((quote delim))),
            Token::CloseDelim(delim) => quote!(rt::token::CloseDelim((quote delim))),
            Token::BinOp(tok) => quote!(rt::token::BinOp((quote tok))),
            Token::BinOpEq(tok) => quote!(rt::token::BinOpEq((quote tok))),
            Token::Ident(ident) => quote!(rt::token::Ident((quote ident))),
            Token::Lifetime(ident) => quote!(rt::token::Lifetime((quote ident))),
            Token::Literal(lit, sfx) => quote!(rt::token::Literal((quote lit), (quote sfx))),
            _ => panic!("Unhandled case!"),
        }
    }
}

impl Quote for token::BinOpToken {
    /// Quotes as the matching `rt::token::BinOpToken::*` path.
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*) => {
                match *self {
                    $( token::BinOpToken::$i => quote!(rt::token::BinOpToken::$i), )*
                }
            }
        }

        gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
    }
}

impl Quote for Lit {
    /// Quotes as the matching `Lit` constructor, re-quoting payloads.
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*; $($raw:ident),*) => {
                match *self {
                    $( Lit::$i(lit) => quote!(rt::token::Lit::$i((quote lit))), )*
                    $( Lit::$raw(lit, n) => {
                        // NOTE(review): raw variants emit an absolute
                        // `::syntax::...` path while all sibling arms use the
                        // `rt::` alias — likely carried over from the old
                        // plugin; confirm both paths resolve where the
                        // generated code runs.
                        quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
                    })*
                }
            }
        }

        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
    }
}

impl Quote for token::DelimToken {
    /// Quotes as the matching `rt::token::DelimToken::*` path.
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*) => {
                match *self {
                    $(token::DelimToken::$i => { quote!(rt::token::DelimToken::$i) })*
                }
            }
        }

        gen_match!(Paren, Bracket, Brace, NoDelim)
    }
}

View file

@ -1,13 +0,0 @@
[package]
authors = ["The Rust Project Developers"]
name = "proc_macro_plugin"
version = "0.0.0"
[lib]
path = "lib.rs"
crate-type = ["dylib"]
[dependencies]
rustc_plugin = { path = "../librustc_plugin" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }

View file

@ -1,103 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Proc_Macro
//!
//! A library for procedural macro writers.
//!
//! ## Usage
//! This crate provides the `quote!` macro for syntax creation.
//!
//! The `quote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;`
//! at the crate root. This is a temporary solution until we have better hygiene.
//!
//! ## Quasiquotation
//!
//! The quasiquoter creates output that, when run, constructs the tokenstream specified as
//! input. For example, `quote!(5 + 5)` will produce a program, that, when run, will
//! construct the TokenStream `5 | + | 5`.
//!
//! ### Unquoting
//!
//! Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
//! To quote `$` itself, use `$$`.
//!
//! A simple example is:
//!
//!```
//!fn double(tmp: TokenStream) -> TokenStream {
//! quote!($tmp * 2)
//!}
//!```
//!
//! ### Large example: Scheme's `cond`
//!
//! Below is an example implementation of Scheme's `cond`.
//!
//! ```
//! fn cond(input: TokenStream) -> TokenStream {
//! let mut conds = Vec::new();
//! let mut input = input.trees().peekable();
//! while let Some(tree) = input.next() {
//! let mut cond = match tree {
//! TokenTree::Delimited(_, ref delimited) => delimited.stream(),
//! _ => panic!("Invalid input"),
//! };
//! let mut trees = cond.trees();
//! let test = trees.next();
//! let rhs = trees.collect::<TokenStream>();
//! if rhs.is_empty() {
//! panic!("Invalid macro usage in cond: {}", cond);
//! }
//! let is_else = match test {
//! Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
//! _ => false,
//! };
//! conds.push(if is_else || input.peek().is_none() {
//! quote!({ $rhs })
//! } else {
//! let test = test.unwrap();
//! quote!(if $test { $rhs } else)
//! });
//! }
//!
//! conds.into_iter().collect()
//! }
//! ```
#![crate_name = "proc_macro_plugin"]
#![feature(plugin_registrar)]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
extern crate rustc_plugin;
extern crate syntax;
extern crate syntax_pos;
mod quote;
use quote::quote;
use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
// ____________________________________________________________________________________________
// Main macro definition
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("quote"),
SyntaxExtension::ProcMacro(Box::new(quote)));
}

View file

@ -1,230 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Quasiquoter
//! This file contains the implementation internals of the quasiquoter provided by `qquote!`.
use syntax::ast::Ident;
use syntax::parse::token::{self, Token, Lit};
use syntax::symbol::Symbol;
use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
use syntax_pos::DUMMY_SP;
use std::iter;
pub fn quote<'cx>(stream: TokenStream) -> TokenStream {
stream.quote()
}
trait Quote {
fn quote(&self) -> TokenStream;
}
macro_rules! quote_tok {
(,) => { Token::Comma };
(.) => { Token::Dot };
(:) => { Token::Colon };
(::) => { Token::ModSep };
(!) => { Token::Not };
(<) => { Token::Lt };
(>) => { Token::Gt };
(_) => { Token::Underscore };
($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) };
}
macro_rules! quote_tree {
((unquote $($t:tt)*)) => { $($t)* };
((quote $($t:tt)*)) => { ($($t)*).quote() };
(($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) };
}
fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
}
macro_rules! quote {
() => { TokenStream::empty() };
($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
}
impl<T: Quote> Quote for Option<T> {
fn quote(&self) -> TokenStream {
match *self {
Some(ref t) => quote!(::std::option::Option::Some((quote t))),
None => quote!(::std::option::Option::None),
}
}
}
impl Quote for TokenStream {
fn quote(&self) -> TokenStream {
if self.is_empty() {
return quote!(::syntax::tokenstream::TokenStream::empty());
}
struct Quoter(iter::Peekable<tokenstream::Cursor>);
impl Iterator for Quoter {
type Item = TokenStream;
fn next(&mut self) -> Option<TokenStream> {
let quoted_tree = if let Some(&TokenTree::Token(_, Token::Dollar)) = self.0.peek() {
self.0.next();
match self.0.next() {
Some(tree @ TokenTree::Token(_, Token::Ident(..))) => Some(tree.into()),
Some(tree @ TokenTree::Token(_, Token::Dollar)) => Some(tree.quote()),
// FIXME(jseyfried): improve these diagnostics
Some(..) => panic!("`$` must be followed by an ident or `$` in `quote!`"),
None => panic!("unexpected trailing `$` in `quote!`"),
}
} else {
self.0.next().as_ref().map(Quote::quote)
};
quoted_tree.map(|quoted_tree| {
quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
})
}
}
let quoted = Quoter(self.trees().peekable()).collect::<TokenStream>();
quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
}
}
impl Quote for TokenTree {
fn quote(&self) -> TokenStream {
match *self {
TokenTree::Token(_, ref token) => quote! {
::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP,
(quote token))
},
TokenTree::Delimited(_, ref delimited) => quote! {
::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
(quote delimited))
},
}
}
}
impl Quote for Delimited {
fn quote(&self) -> TokenStream {
quote!(::syntax::tokenstream::Delimited {
delim: (quote self.delim),
tts: (quote self.stream()).into(),
})
}
}
impl<'a> Quote for &'a str {
fn quote(&self) -> TokenStream {
TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
.into()
}
}
impl Quote for usize {
fn quote(&self) -> TokenStream {
let integer_symbol = Symbol::intern(&self.to_string());
TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
.into()
}
}
impl Quote for Ident {
fn quote(&self) -> TokenStream {
// FIXME(jseyfried) quote hygiene
quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str())))
}
}
impl Quote for Symbol {
fn quote(&self) -> TokenStream {
quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str())))
}
}
impl Quote for Token {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
($($i:ident),*; $($t:tt)*) => {
match *self {
$( Token::$i => quote!(::syntax::parse::token::$i), )*
$( $t )*
}
}
}
gen_match! {
Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question,
Underscore;
Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))),
Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))),
Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))),
Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))),
Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))),
Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))),
Token::Literal(lit, sfx) => quote! {
::syntax::parse::token::Literal((quote lit), (quote sfx))
},
_ => panic!("Unhandled case!"),
}
}
}
impl Quote for token::BinOpToken {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
($($i:ident),*) => {
match *self {
$( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )*
}
}
}
gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
}
}
impl Quote for Lit {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
($($i:ident),*; $($raw:ident),*) => {
match *self {
$( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
$( Lit::$raw(lit, n) => {
quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
})*
}
}
}
gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
}
}
impl Quote for token::DelimToken {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
($($i:ident),*) => {
match *self {
$(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })*
}
}
}
gen_match!(Paren, Bracket, Brace, NoDelim)
}
}

View file

@ -55,12 +55,19 @@ fn clone(&self) -> Self {
}
impl DefPathTable {
    /// Creates an empty `DefPathTable`. Each `[vec![], vec![]]` pair holds
    /// one vector per `DefIndex` address space (indexed via
    /// `address_space.index()` in `allocate`).
    pub fn new() -> Self {
        DefPathTable {
            index_to_key: [vec![], vec![]],
            key_to_index: FxHashMap(),
            def_path_hashes: [vec![], vec![]],
        }
    }
fn allocate(&mut self,
key: DefKey,
def_path_hash: DefPathHash,
address_space: DefIndexAddressSpace)
-> DefIndex {
pub fn allocate(&mut self,
key: DefKey,
def_path_hash: DefPathHash,
address_space: DefIndexAddressSpace)
-> DefIndex {
let index = {
let index_to_key = &mut self.index_to_key[address_space.index()];
let index = DefIndex::new(index_to_key.len() + address_space.start());
@ -241,7 +248,7 @@ pub struct DefKey {
}
impl DefKey {
fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash {
pub fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash {
let mut hasher = StableHasher::new();
// We hash a 0u8 here to disambiguate between regular DefPath hashes,
@ -284,7 +291,7 @@ fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash {
DefPathHash(hasher.finish())
}
fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash {
pub fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash {
let mut hasher = StableHasher::new();
// Disambiguate this from a regular DefPath hash,
// see compute_stable_hash() above.
@ -446,11 +453,7 @@ impl Definitions {
/// Create new empty definition map.
pub fn new() -> Definitions {
Definitions {
table: DefPathTable {
index_to_key: [vec![], vec![]],
key_to_index: FxHashMap(),
def_path_hashes: [vec![], vec![]],
},
table: DefPathTable::new(),
node_to_def_index: NodeMap(),
def_index_to_node: [vec![], vec![]],
node_to_hir_id: IndexVec::new(),

View file

@ -728,6 +728,7 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let ref declared_lib_features = sess.features.borrow().declared_lib_features;
let mut remaining_lib_features: FxHashMap<Symbol, Span>
= declared_lib_features.clone().into_iter().collect();
remaining_lib_features.remove(&Symbol::intern("proc_macro"));
fn format_stable_since_msg(version: &str) -> String {
format!("this feature has been stable since {}. Attribute no longer needed", version)

View file

@ -13,7 +13,6 @@ arena = { path = "../libarena" }
graphviz = { path = "../libgraphviz" }
log = { version = "0.3", features = ["release_max_level_info"] }
env_logger = { version = "0.4", default-features = false }
proc_macro_plugin = { path = "../libproc_macro_plugin" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_borrowck = { path = "../librustc_borrowck" }

View file

@ -26,7 +26,8 @@
use rustc::util::common::record_time;
use rustc::util::nodemap::FxHashSet;
use rustc::middle::cstore::NativeLibrary;
use rustc::hir::map::Definitions;
use rustc::hir::map::{Definitions, DefKey, DefPathData, DisambiguatedDefPathData, ITEM_LIKE_SPACE};
use rustc::hir::map::definitions::DefPathTable;
use std::cell::{RefCell, Cell};
use std::ops::Deref;
@ -34,7 +35,7 @@
use std::rc::Rc;
use std::{cmp, fs};
use syntax::ast;
use syntax::ast::{self, Ident};
use syntax::abi::Abi;
use syntax::attr;
use syntax::ext::base::SyntaxExtension;
@ -307,9 +308,16 @@ fn register_crate(&mut self,
let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind);
let def_path_table = record_time(&self.sess.perf_stats.decode_def_path_tables_time, || {
crate_root.def_path_table.decode(&metadata)
let proc_macros = crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
});
let def_path_table = if let Some(ref proc_macros) = proc_macros {
proc_macro_def_path_table(proc_macros)
} else {
record_time(&self.sess.perf_stats.decode_def_path_tables_time, || {
crate_root.def_path_table.decode(&metadata)
})
};
let exported_symbols = crate_root.exported_symbols
.map(|x| x.decode(&metadata).collect());
@ -328,9 +336,7 @@ fn register_crate(&mut self,
def_path_table: Rc::new(def_path_table),
exported_symbols: exported_symbols,
trait_impls: trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
}),
proc_macros: proc_macros,
root: crate_root,
blob: metadata,
cnum_map: RefCell::new(cnum_map),
@ -1213,3 +1219,31 @@ fn process_item(&mut self, item: &ast::Item, definitions: &Definitions) {
}
}
}
/// Synthesizes a `DefPathTable` for a proc-macro crate: a crate root plus
/// one `MacroDef` entry per exported proc macro, all allocated in the
/// item-like address space.
fn proc_macro_def_path_table(proc_macros: &[(ast::Name, Rc<SyntaxExtension>)]) -> DefPathTable {
    let mut table = DefPathTable::new();

    // Allocate the crate-root entry first so the macros can hang off it.
    // The crate name/disambiguator are left empty here.
    let root_key = DefKey {
        parent: None,
        disambiguated_data: DisambiguatedDefPathData {
            data: DefPathData::CrateRoot,
            disambiguator: 0,
        },
    };
    let root_hash = root_key.compute_stable_hash(DefKey::root_parent_stable_hash("", ""));
    let root_index = table.allocate(root_key, root_hash, ITEM_LIKE_SPACE);
    let root_path_hash = table.def_path_hash(root_index);

    // One `MacroDef` child of the crate root per exported macro name.
    for &(name, _) in proc_macros {
        let macro_key = DefKey {
            parent: Some(CRATE_DEF_INDEX),
            disambiguated_data: DisambiguatedDefPathData {
                data: DefPathData::MacroDef(Ident::with_empty_ctxt(name)),
                disambiguator: 0,
            },
        };
        let macro_hash = macro_key.compute_stable_hash(root_path_hash);
        table.allocate(macro_key, macro_hash, ITEM_LIKE_SPACE);
    }

    table
}

View file

@ -33,6 +33,7 @@
use syntax::ast;
use syntax::attr;
use syntax::ext::base::SyntaxExtension;
use syntax::parse::filemap_to_stream;
use syntax::symbol::Symbol;
use syntax_pos::{Span, NO_EXPANSION};
@ -365,6 +366,10 @@ fn load_macro(&self, id: DefId, sess: &Session) -> LoadedMacro {
let data = self.get_crate_data(id.krate);
if let Some(ref proc_macros) = data.proc_macros {
return LoadedMacro::ProcMacro(proc_macros[id.index.as_usize() - 1].1.clone());
} else if data.name == "proc_macro" &&
self.get_crate_data(id.krate).item_name(id.index) == "quote" {
let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::__internal::Quoter));
return LoadedMacro::ProcMacro(Rc::new(ext));
}
let (name, def) = data.get_macro(id.index);

View file

@ -472,7 +472,7 @@ fn local_def_id(&self, index: DefIndex) -> DefId {
}
}
fn item_name(&self, item_index: DefIndex) -> ast::Name {
pub fn item_name(&self, item_index: DefIndex) -> ast::Name {
self.def_key(item_index)
.disambiguated_data
.data

View file

@ -1095,6 +1095,7 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
/// Serialize the text of exported macros
fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
use syntax::print::pprust;
let def_id = self.tcx.hir.local_def_id(macro_def.id);
Entry {
kind: EntryKind::MacroDef(self.lazy(&MacroDef {
body: pprust::tts_to_string(&macro_def.body.trees().collect::<Vec<_>>()),
@ -1102,11 +1103,11 @@ fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx
})),
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&macro_def.span),
attributes: self.encode_attributes(&macro_def.attrs),
stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id),
children: LazySeq::empty(),
stability: None,
deprecation: None,
ty: None,
inherent_impls: LazySeq::empty(),
variances: LazySeq::empty(),

View file

@ -100,7 +100,7 @@ pub fn default_to_global(mut self) -> Path {
let name = self.segments[0].identifier.name;
if !self.is_global() && name != "$crate" &&
name != keywords::SelfValue.name() && name != keywords::Super.name() {
self.segments.insert(0, PathSegment::crate_root());
self.segments.insert(0, PathSegment::crate_root(self.span));
}
self
}
@ -134,10 +134,10 @@ impl PathSegment {
pub fn from_ident(ident: Ident, span: Span) -> Self {
PathSegment { identifier: ident, span: span, parameters: None }
}
pub fn crate_root() -> Self {
pub fn crate_root(span: Span) -> Self {
PathSegment {
identifier: keywords::CrateRoot.ident(),
span: DUMMY_SP,
identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() },
span: span,
parameters: None,
}
}

View file

@ -578,7 +578,10 @@ pub fn kind(&self) -> MacroKind {
pub fn is_modern(&self) -> bool {
match *self {
SyntaxExtension::DeclMacro(..) => true,
SyntaxExtension::DeclMacro(..) |
SyntaxExtension::ProcMacro(..) |
SyntaxExtension::AttrProcMacro(..) |
SyntaxExtension::ProcMacroDerive(..) => true,
_ => false,
}
}

View file

@ -320,7 +320,7 @@ fn path_all(&self,
let last_identifier = idents.pop().unwrap();
let mut segments: Vec<ast::PathSegment> = Vec::new();
if global {
segments.push(ast::PathSegment::crate_root());
segments.push(ast::PathSegment::crate_root(sp));
}
segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, sp)));

View file

@ -38,12 +38,19 @@
use std::ascii::AsciiExt;
use std::env;
macro_rules! setter {
($field: ident) => {{
fn f(features: &mut Features) -> &mut bool {
&mut features.$field
macro_rules! set {
(proc_macro) => {{
fn f(features: &mut Features, span: Span) {
features.declared_lib_features.push((Symbol::intern("proc_macro"), span));
features.proc_macro = true;
}
f as fn(&mut Features) -> &mut bool
f as fn(&mut Features, Span)
}};
($field: ident) => {{
fn f(features: &mut Features, _: Span) {
features.$field = true;
}
f as fn(&mut Features, Span)
}}
}
@ -51,10 +58,9 @@ macro_rules! declare_features {
($((active, $feature: ident, $ver: expr, $issue: expr),)+) => {
/// Represents active features that are currently being implemented or
/// currently being considered for addition/removal.
const ACTIVE_FEATURES: &'static [(&'static str, &'static str,
Option<u32>, fn(&mut Features) -> &mut bool)] = &[
$((stringify!($feature), $ver, $issue, setter!($feature))),+
];
const ACTIVE_FEATURES:
&'static [(&'static str, &'static str, Option<u32>, fn(&mut Features, Span))] =
&[$((stringify!($feature), $ver, $issue, set!($feature))),+];
/// A set of features to be used by later passes.
pub struct Features {
@ -1464,9 +1470,9 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F
continue
};
if let Some(&(_, _, _, setter)) = ACTIVE_FEATURES.iter()
if let Some(&(_, _, _, set)) = ACTIVE_FEATURES.iter()
.find(|& &(n, _, _, _)| name == n) {
*(setter(&mut features)) = true;
set(&mut features, mi.span);
feature_checker.collect(&features, mi.span);
}
else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter()
@ -1500,7 +1506,7 @@ struct MutexFeatureChecker {
impl MutexFeatureChecker {
// If this method turns out to be a hotspot due to branching,
// the branching can be eliminated by modifying `setter!()` to set these spans
// the branching can be eliminated by modifying `set!()` to set these spans
// only for the features that need to be checked for mutual exclusion.
fn collect(&mut self, features: &Features, span: Span) {
if features.proc_macro {

View file

@ -573,7 +573,7 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
}
pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
tts.trees().map(|tt| fld.fold_tt(tt)).collect()
tts.map(|tt| fld.fold_tt(tt))
}
// apply ident folder if it's an ident, apply other folds to interpolated nodes

View file

@ -19,7 +19,9 @@ impl<'a> StringReader<'a> {
pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree()?.into());
let tree = self.parse_token_tree()?;
let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
tts.push(if is_joint { tree.joint() } else { tree.into() });
}
Ok(TokenStream::concat(tts))
}
@ -31,13 +33,15 @@ fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
if let token::CloseDelim(..) = self.token {
return TokenStream::concat(tts);
}
match self.parse_token_tree() {
Ok(tt) => tts.push(tt.into()),
let tree = match self.parse_token_tree() {
Ok(tree) => tree,
Err(mut e) => {
e.emit();
return TokenStream::concat(tts);
}
}
};
let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
tts.push(if is_joint { tree.joint() } else { tree.into() });
}
}

View file

@ -1777,7 +1777,7 @@ fn parse_path_common(&mut self, mode: PathStyle, parse_generics: bool)
};
if is_global {
segments.insert(0, PathSegment::crate_root());
segments.insert(0, PathSegment::crate_root(lo));
}
// Assemble the result.
@ -6187,7 +6187,7 @@ fn parse_view_path(&mut self) -> PResult<'a, P<ViewPath>> {
// `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`.
self.eat(&token::ModSep);
let prefix = ast::Path {
segments: vec![PathSegment::crate_root()],
segments: vec![PathSegment::crate_root(lo)],
span: lo.to(self.span),
};
let view_path_kind = if self.eat(&token::BinOp(token::Star)) {

View file

@ -349,6 +349,60 @@ pub fn is_reserved_keyword(&self) -> bool {
_ => false,
}
}
    /// Attempts to combine `self` with the immediately following token
    /// `joint` into a single compound token (e.g. `=` + `=` -> `==`,
    /// `<` + `-` -> `<-`). Returns `None` when the pair does not form a
    /// larger token.
    pub fn glue(self, joint: Token) -> Option<Token> {
        Some(match self {
            Eq => match joint {
                Eq => EqEq,
                Gt => FatArrow,
                _ => return None,
            },
            Lt => match joint {
                Eq => Le,
                Lt => BinOp(Shl),
                Le => BinOpEq(Shl),
                BinOp(Minus) => LArrow,
                _ => return None,
            },
            Gt => match joint {
                Eq => Ge,
                Gt => BinOp(Shr),
                Ge => BinOpEq(Shr),
                _ => return None,
            },
            Not => match joint {
                Eq => Ne,
                _ => return None,
            },
            // Binary operators: `+=`-style compounds, plus the doubled
            // `&&`/`||` forms and `->` (minus followed by greater-than).
            BinOp(op) => match joint {
                Eq => BinOpEq(op),
                BinOp(And) if op == And => AndAnd,
                BinOp(Or) if op == Or => OrOr,
                Gt if op == Minus => RArrow,
                _ => return None,
            },
            Dot => match joint {
                Dot => DotDot,
                DotDot => DotDotDot,
                _ => return None,
            },
            DotDot => match joint {
                Dot => DotDotDot,
                _ => return None,
            },
            Colon => match joint {
                Colon => ModSep,
                _ => return None,
            },

            // Operators that are already maximal never extend further.
            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | Comma |
            Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question |
            OpenDelim(..) | CloseDelim(..) | Underscore => return None,

            // Non-operator tokens (literals, identifiers, trivia, ...) never glue.
            Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
            Whitespace | Comment | Shebang(..) | Eof => return None,
        })
    }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash)]
@ -398,3 +452,12 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
}
}
/// Returns `true` for operator-like tokens — anything that is not a
/// delimiter, literal, doc comment, identifier, lifetime, interpolated
/// fragment, or trivia. Used by the lexer when deciding whether two
/// adjacent tokens should be marked joint.
pub fn is_op(tok: &Token) -> bool {
    // Classify the non-operator tokens explicitly, then invert.
    let non_op = match *tok {
        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
        Ident(..) | Underscore | Lifetime(..) | Interpolated(..) |
        Whitespace | Comment | Shebang(..) | Eof => true,
        _ => false,
    };
    !non_op
}

View file

@ -138,6 +138,10 @@ pub fn eq_token(&self, t: Token) -> bool {
_ => false,
}
}
    /// Wraps this tree in a one-element stream marked joint, i.e. recorded
    /// as immediately adjacent to the following token so the pair can later
    /// be glued into a compound token.
    pub fn joint(self) -> TokenStream {
        TokenStream { kind: TokenStreamKind::JointTree(self) }
    }
}
/// # Token Streams
@ -155,6 +159,7 @@ pub struct TokenStream {
enum TokenStreamKind {
Empty,
Tree(TokenTree),
JointTree(TokenTree),
Stream(RcSlice<TokenStream>),
}
@ -196,6 +201,10 @@ pub fn is_empty(&self) -> bool {
}
}
pub fn builder() -> TokenStreamBuilder {
TokenStreamBuilder(Vec::new())
}
pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
match streams.len() {
0 => TokenStream::empty(),
@ -225,6 +234,99 @@ pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
}
true
}
    /// Unwraps a single-tree stream into its tree plus whether it was joint.
    /// Panics (`unreachable!`) on empty or multi-tree streams.
    pub fn as_tree(self) -> (TokenTree, bool /* joint? */) {
        match self.kind {
            TokenStreamKind::Tree(tree) => (tree, false),
            TokenStreamKind::JointTree(tree) => (tree, true),
            _ => unreachable!(),
        }
    }
    /// Applies `f` to every token tree in the stream and rebuilds the
    /// stream, preserving each tree's joint-ness.
    pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
        let mut trees = self.into_trees();
        let mut result = Vec::new();
        // Iterate as streams (not trees) so we can see the Tree/JointTree
        // wrapper and re-wrap the mapped tree the same way.
        while let Some(stream) = trees.next_as_stream() {
            result.push(match stream.kind {
                TokenStreamKind::Tree(tree) => f(tree).into(),
                TokenStreamKind::JointTree(tree) => f(tree).joint(),
                _ => unreachable!()
            });
        }
        TokenStream::concat(result)
    }
    /// Returns a clone of the first token tree in the stream, or `None` if
    /// the stream is empty. Recurses into the first nested sub-stream.
    fn first_tree(&self) -> Option<TokenTree> {
        match self.kind {
            TokenStreamKind::Empty => None,
            TokenStreamKind::Tree(ref tree) |
            TokenStreamKind::JointTree(ref tree) => Some(tree.clone()),
            TokenStreamKind::Stream(ref stream) => stream.first().unwrap().first_tree(),
        }
    }
    /// Returns the stream's last token tree only if that tree is joint
    /// (eligible for gluing with a following token); otherwise `None`.
    fn last_tree_if_joint(&self) -> Option<TokenTree> {
        match self.kind {
            TokenStreamKind::Empty | TokenStreamKind::Tree(..) => None,
            TokenStreamKind::JointTree(ref tree) => Some(tree.clone()),
            TokenStreamKind::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(),
        }
    }
}
/// Accumulates `TokenStream`s, gluing a trailing joint token of the
/// previously pushed stream onto the leading token of the next one
/// (e.g. a joint `=` followed by `=` becomes a single `==`).
pub struct TokenStreamBuilder(Vec<TokenStream>);

impl TokenStreamBuilder {
    /// Appends `stream`, merging its first token into the previous stream's
    /// last token when that last token is joint and the pair glues.
    pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
        let stream = stream.into();
        let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
            if let Some(TokenTree::Token(span, tok)) = stream.first_tree() {
                if let Some(glued_tok) = last_tok.glue(tok) {
                    // Replace the previous stream's last tree and the new
                    // stream's first tree with the single glued token,
                    // spanning from the start of one to the end of the other.
                    let last_stream = self.0.pop().unwrap();
                    self.push_all_but_last_tree(&last_stream);
                    let glued_span = last_span.to(span);
                    self.0.push(TokenTree::Token(glued_span, glued_tok).into());
                    self.push_all_but_first_tree(&stream);
                    return
                }
            }
        }
        // No gluing possible: append the stream unchanged.
        self.0.push(stream);
    }

    /// Chaining variant of `push` that consumes and returns the builder.
    pub fn add<T: Into<TokenStream>>(mut self, stream: T) -> Self {
        self.push(stream);
        self
    }

    /// Finishes the builder, concatenating everything pushed so far.
    pub fn build(self) -> TokenStream {
        TokenStream::concat(self.0)
    }

    // Pushes every tree of `stream` except its last, recursing into the
    // final sub-stream; intermediate slices share the backing allocation
    // via `sub_slice`.
    fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
        if let TokenStreamKind::Stream(ref streams) = stream.kind {
            let len = streams.len();
            match len {
                1 => {}
                2 => self.0.push(streams[0].clone().into()),
                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(0 .. len - 1))),
            }
            self.push_all_but_last_tree(&streams[len - 1])
        }
    }

    // Mirror image of `push_all_but_last_tree`: pushes every tree of
    // `stream` except its first.
    fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
        if let TokenStreamKind::Stream(ref streams) = stream.kind {
            let len = streams.len();
            match len {
                1 => {}
                2 => self.0.push(streams[1].clone().into()),
                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(1 .. len))),
            }
            self.push_all_but_first_tree(&streams[0])
        }
    }
}
#[derive(Clone)]
@ -234,6 +336,7 @@ pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
enum CursorKind {
Empty,
Tree(TokenTree, bool /* consumed? */),
JointTree(TokenTree, bool /* consumed? */),
Stream(StreamCursor),
}
@ -245,12 +348,13 @@ struct StreamCursor {
}
impl StreamCursor {
fn next(&mut self) -> Option<TokenTree> {
fn next_as_stream(&mut self) -> Option<TokenStream> {
loop {
if self.index < self.stream.len() {
self.index += 1;
match self.stream[self.index - 1].kind.clone() {
TokenStreamKind::Tree(tree) => return Some(tree),
let next = self.stream[self.index - 1].clone();
match next.kind {
TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => return Some(next),
TokenStreamKind::Stream(stream) => {
self.stack.push((mem::replace(&mut self.stream, stream),
mem::replace(&mut self.index, 0)));
@ -271,14 +375,10 @@ impl Iterator for Cursor {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
let (tree, consumed) = match self.0 {
CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed),
CursorKind::Stream(ref mut cursor) => return cursor.next(),
_ => return None,
};
*consumed = true;
Some(tree.clone())
self.next_as_stream().map(|stream| match stream.kind {
TokenStreamKind::Tree(tree) | TokenStreamKind::JointTree(tree) => tree,
_ => unreachable!()
})
}
}
@ -287,16 +387,32 @@ fn new(stream: TokenStream) -> Self {
Cursor(match stream.kind {
TokenStreamKind::Empty => CursorKind::Empty,
TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false),
TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false),
TokenStreamKind::Stream(stream) => {
CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() })
}
})
}
    /// Like `next`, but yields the tree wrapped as a one-element
    /// `TokenStream` so the caller can observe its joint-ness.
    pub fn next_as_stream(&mut self) -> Option<TokenStream> {
        let (stream, consumed) = match self.0 {
            // Single-tree cursors yield once; `consumed @ false` both tests
            // and binds the flag so it can be flipped below.
            CursorKind::Tree(ref tree, ref mut consumed @ false) =>
                (tree.clone().into(), consumed),
            CursorKind::JointTree(ref tree, ref mut consumed @ false) =>
                (tree.clone().joint(), consumed),
            CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(),
            _ => return None,
        };
        *consumed = true;
        Some(stream)
    }
pub fn original_stream(self) -> TokenStream {
match self.0 {
CursorKind::Empty => TokenStream::empty(),
CursorKind::Tree(tree, _) => tree.into(),
CursorKind::JointTree(tree, _) => tree.joint(),
CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({
cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream)
}),
@ -307,8 +423,9 @@ pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
for stream in streams {
n = match stream.kind {
TokenStreamKind::Tree(ref tree) if n == 0 => return Ok(tree.clone()),
TokenStreamKind::Tree(..) => n - 1,
TokenStreamKind::Tree(ref tree) | TokenStreamKind::JointTree(ref tree)
if n == 0 => return Ok(tree.clone()),
TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => n - 1,
TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) {
Ok(tree) => return Ok(tree),
Err(n) => n,
@ -316,13 +433,15 @@ fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize>
_ => n,
};
}
Err(n)
}
match self.0 {
CursorKind::Empty | CursorKind::Tree(_, true) => Err(n),
CursorKind::Tree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
CursorKind::Empty |
CursorKind::Tree(_, true) |
CursorKind::JointTree(_, true) => Err(n),
CursorKind::Tree(ref tree, false) |
CursorKind::JointTree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
CursorKind::Stream(ref cursor) => {
look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
for &(ref stream, index) in cursor.stack.iter().rev() {
@ -350,6 +469,7 @@ fn from(stream: TokenStream) -> ThinTokenStream {
ThinTokenStream(match stream.kind {
TokenStreamKind::Empty => None,
TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
TokenStreamKind::JointTree(tree) => Some(RcSlice::new(vec![tree.joint()])),
TokenStreamKind::Stream(stream) => Some(stream),
})
}

View file

@ -9,7 +9,7 @@
// except according to those terms.
use std::fmt;
use std::ops::Deref;
use std::ops::{Deref, Range};
use std::rc::Rc;
use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
@ -30,6 +30,14 @@ pub fn new(vec: Vec<T>) -> Self {
data: Rc::new(vec.into_boxed_slice()),
}
}
pub fn sub_slice(&self, range: Range<usize>) -> Self {
RcSlice {
data: self.data.clone(),
offset: self.offset + range.start as u32,
len: (range.end - range.start) as u32,
}
}
}
impl<T> Deref for RcSlice<T> {

View file

@ -8,50 +8,37 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused_parens)]
#![feature(plugin)]
#![feature(plugin_registrar)]
#![feature(rustc_private)]
#![plugin(proc_macro_plugin)]
// no-prefer-dynamic
extern crate rustc_plugin;
extern crate syntax;
#![crate_type = "proc-macro"]
#![feature(proc_macro, proc_macro_lib)]
use rustc_plugin::Registry;
extern crate proc_macro;
use syntax::ext::base::SyntaxExtension;
use syntax::parse::token::Token;
use syntax::symbol::Symbol;
use syntax::tokenstream::{TokenTree, TokenStream};
use proc_macro::{TokenStream, TokenKind, quote};
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("cond"),
SyntaxExtension::ProcMacro(Box::new(cond)));
}
fn cond(input: TokenStream) -> TokenStream {
#[proc_macro]
pub fn cond(input: TokenStream) -> TokenStream {
let mut conds = Vec::new();
let mut input = input.trees().peekable();
let mut input = input.into_iter().peekable();
while let Some(tree) = input.next() {
let mut cond = match tree {
TokenTree::Delimited(_, ref delimited) => delimited.stream(),
let cond = match tree.kind {
TokenKind::Sequence(_, cond) => cond,
_ => panic!("Invalid input"),
};
let mut trees = cond.trees();
let test = trees.next();
let rhs = trees.collect::<TokenStream>();
let mut cond_trees = cond.clone().into_iter();
let test = cond_trees.next().expect("Unexpected empty condition in `cond!`");
let rhs = cond_trees.collect::<TokenStream>();
if rhs.is_empty() {
panic!("Invalid macro usage in cond: {}", cond);
}
let is_else = match test {
Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
let is_else = match test.kind {
TokenKind::Word(word) => *word == *"else",
_ => false,
};
conds.push(if is_else || input.peek().is_none() {
quote!({ $rhs })
} else {
let test = test.unwrap();
quote!(if $test { $rhs } else)
});
}

View file

@ -8,29 +8,20 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(plugin)]
#![feature(plugin_registrar)]
#![feature(rustc_private)]
#![plugin(proc_macro_plugin)]
// no-prefer-dynamic
extern crate rustc_plugin;
extern crate syntax;
#![crate_type = "proc-macro"]
#![feature(proc_macro, proc_macro_lib)]
use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
use syntax::tokenstream::TokenStream;
extern crate proc_macro;
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("hello"),
SyntaxExtension::ProcMacro(Box::new(hello)));
}
use proc_macro::{TokenStream, quote};
// This macro is not very interesting, but it does contain delimited tokens with
// no content - `()` and `{}` - which has caused problems in the past.
// Also, it tests that we can escape `$` via `$$`.
fn hello(_: TokenStream) -> TokenStream {
#[proc_macro]
pub fn hello(_: TokenStream) -> TokenStream {
quote!({
fn hello() {}
macro_rules! m { ($$($$t:tt)*) => { $$($$t)* } }

View file

@ -8,47 +8,37 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(plugin, plugin_registrar, rustc_private)]
#![plugin(proc_macro_plugin)]
// no-prefer-dynamic
extern crate rustc_plugin;
extern crate syntax;
#![crate_type = "proc-macro"]
#![feature(proc_macro, proc_macro_lib)]
use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
use syntax::tokenstream::TokenStream;
use syntax::symbol::Symbol;
extern crate proc_macro;
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("attr_tru"),
SyntaxExtension::AttrProcMacro(Box::new(attr_tru)));
reg.register_syntax_extension(Symbol::intern("attr_identity"),
SyntaxExtension::AttrProcMacro(Box::new(attr_identity)));
reg.register_syntax_extension(Symbol::intern("tru"),
SyntaxExtension::ProcMacro(Box::new(tru)));
reg.register_syntax_extension(Symbol::intern("ret_tru"),
SyntaxExtension::ProcMacro(Box::new(ret_tru)));
reg.register_syntax_extension(Symbol::intern("identity"),
SyntaxExtension::ProcMacro(Box::new(identity)));
use proc_macro::{TokenStream, quote};
#[proc_macro_attribute]
pub fn attr_tru(_attr: TokenStream, item: TokenStream) -> TokenStream {
let name = item.into_iter().skip(1).next().unwrap();
quote!(fn $name() -> bool { true })
}
fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
quote!(fn f1() -> bool { true })
}
fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
#[proc_macro_attribute]
pub fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
quote!($item)
}
fn tru(_ts: TokenStream) -> TokenStream {
#[proc_macro]
pub fn tru(_ts: TokenStream) -> TokenStream {
quote!(true)
}
fn ret_tru(_ts: TokenStream) -> TokenStream {
#[proc_macro]
pub fn ret_tru(_ts: TokenStream) -> TokenStream {
quote!(return true;)
}
fn identity(ts: TokenStream) -> TokenStream {
#[proc_macro]
pub fn identity(ts: TokenStream) -> TokenStream {
quote!($ts)
}

View file

@ -1,40 +0,0 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-stage1
#![feature(plugin)]
#![feature(rustc_private)]
#![plugin(proc_macro_plugin)]
extern crate syntax;
extern crate syntax_pos;
use syntax::ast::{Ident, Name};
use syntax::parse::token::{self, Token, Lit};
use syntax::tokenstream::TokenTree;
fn main() {
let true_tok = token::Ident(Ident::from_str("true"));
assert!(quote!(true).eq_unspanned(&true_tok.into()));
// issue #35829, extended check to proc_macro.
let triple_dot_tok = Token::DotDotDot;
assert!(quote!(...).eq_unspanned(&triple_dot_tok.into()));
let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None);
assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into()));
let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None);
assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into()));
let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None);
assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into()));
}

View file

@ -11,9 +11,11 @@
// aux-build:cond_plugin.rs
// ignore-stage1
#![feature(plugin)]
#![feature(rustc_private)]
#![plugin(cond_plugin)]
#![feature(proc_macro)]
extern crate cond_plugin;
use cond_plugin::cond;
fn fact(n : i64) -> i64 {
if n == 0 {

View file

@ -13,10 +13,10 @@
// aux-build:hello_macro.rs
// ignore-stage1
#![feature(plugin)]
#![feature(rustc_private)]
#![plugin(hello_macro)]
#![feature(proc_macro)]
extern crate hello_macro;
fn main() {
hello!();
hello_macro::hello!();
}

View file

@ -12,10 +12,11 @@
// ignore-stage1
// ignore-cross-compile
#![feature(plugin, custom_attribute)]
#![feature(type_macros)]
#![feature(proc_macro)]
#![plugin(proc_macro_def)]
extern crate proc_macro_def;
use proc_macro_def::{attr_tru, attr_identity, identity, ret_tru, tru};
#[attr_tru]
fn f1() -> bool {

View file

@ -91,14 +91,6 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
continue
}
// We want the compiler to depend on the proc_macro_plugin crate so
// that it is built and included in the end, but we don't want to
// actually use it in the compiler.
if toml.contains("name = \"rustc_driver\"") &&
krate == "proc_macro_plugin" {
continue
}
if !librs.contains(&format!("extern crate {}", krate)) {
tidy_error!(bad, "{} doesn't have `extern crate {}`, but Cargo.toml \
depends on it", libfile.display(), krate);

View file

@ -245,7 +245,7 @@ fn get_and_check_lib_features(base_src_path: &Path,
let mut err = |msg: &str| {
tidy_error!(bad, "{}:{}: {}", file.display(), line, msg);
};
if lang_features.contains_key(name) {
if lang_features.contains_key(name) && feature_name != "proc_macro" {
err("duplicating a lang feature");
}
if let Some(ref s) = lib_features.get(name) {