diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs
index 620ee30c9562..105a9e099b1a 100644
--- a/src/librustc/hir/lowering.rs
+++ b/src/librustc/hir/lowering.rs
@@ -1208,36 +1208,30 @@ impl<'a> LoweringContext<'a> {
ExprKind::Break(opt_ident) => hir::ExprBreak(self.lower_opt_sp_ident(opt_ident)),
ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)),
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| self.lower_expr(x))),
- ExprKind::InlineAsm(InlineAsm {
- ref inputs,
- ref outputs,
- ref asm,
- asm_str_style,
- ref clobbers,
- volatile,
- alignstack,
- dialect,
- expn_id,
- }) => hir::ExprInlineAsm(P(hir::InlineAsm {
- inputs: inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
- outputs: outputs.iter()
- .map(|out| {
- hir::InlineAsmOutput {
- constraint: out.constraint.clone(),
- is_rw: out.is_rw,
- is_indirect: out.is_indirect,
- }
- })
- .collect(),
- asm: asm.clone(),
- asm_str_style: asm_str_style,
- clobbers: clobbers.clone().into(),
- volatile: volatile,
- alignstack: alignstack,
- dialect: dialect,
- expn_id: expn_id,
- }), outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(),
- inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect()),
+ ExprKind::InlineAsm(ref asm) => {
+ let hir_asm = hir::InlineAsm {
+ inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
+ outputs: asm.outputs.iter().map(|out| {
+ hir::InlineAsmOutput {
+ constraint: out.constraint.clone(),
+ is_rw: out.is_rw,
+ is_indirect: out.is_indirect,
+ }
+ }).collect(),
+ asm: asm.asm.clone(),
+ asm_str_style: asm.asm_str_style,
+ clobbers: asm.clobbers.clone().into(),
+ volatile: asm.volatile,
+ alignstack: asm.alignstack,
+ dialect: asm.dialect,
+ expn_id: asm.expn_id,
+ };
+ let outputs =
+ asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect();
+ let inputs =
+ asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect();
+ hir::ExprInlineAsm(P(hir_asm), outputs, inputs)
+ }
ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
hir::ExprStruct(self.lower_path(path),
fields.iter().map(|x| self.lower_field(x)).collect(),
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index f077ead1f8e0..f7581924eb19 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -1050,7 +1050,7 @@ pub enum ExprKind {
Ret(Option<P<Expr>>),
/// Output of the `asm!()` macro
- InlineAsm(InlineAsm),
+ InlineAsm(P<InlineAsm>),
/// A macro invocation; pre-expansion
Mac(Mac),
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index cc097ab0efad..1f47a91fcc13 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -615,7 +615,9 @@ impl<'a> ExtCtxt<'a> {
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-> parser::Parser<'a> {
- parse::tts_to_parser(self.parse_sess, tts.to_vec())
+ let mut parser = parse::tts_to_parser(self.parse_sess, tts.to_vec());
+ parser.allow_interpolated_tts = false; // FIXME(jseyfried) `quote!` can't handle these yet
+ parser
}
pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index f21360755bc2..969cfa292ce8 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -80,67 +80,71 @@ pub mod rt {
impl ToTokens for ast::Path {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(DUMMY_SP,
- token::Interpolated(token::NtPath(Box::new(self.clone()))))]
+ let nt = token::NtPath(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::Ty {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
+ let nt = token::NtTy(P(self.clone()));
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::Block {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))]
+ let nt = token::NtBlock(P(self.clone()));
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::Generics {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))]
+ let nt = token::NtGenerics(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::WhereClause {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(DUMMY_SP,
- token::Interpolated(token::NtWhereClause(self.clone())))]
+ let nt = token::NtWhereClause(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for P<ast::Item> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))]
+ let nt = token::NtItem(self.clone());
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::ImplItem {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(self.span,
- token::Interpolated(token::NtImplItem(P(self.clone()))))]
+ let nt = token::NtImplItem(self.clone());
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for P<ast::ImplItem> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
+ let nt = token::NtImplItem((**self).clone());
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::TraitItem {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(self.span,
- token::Interpolated(token::NtTraitItem(P(self.clone()))))]
+ let nt = token::NtTraitItem(self.clone());
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::Stmt {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- let mut tts = vec![
- TokenTree::Token(self.span, token::Interpolated(token::NtStmt(P(self.clone()))))
- ];
+ let nt = token::NtStmt(self.clone());
+ let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))];
// Some statements require a trailing semicolon.
if classify::stmt_ends_with_semi(&self.node) {
@@ -153,31 +157,36 @@ pub mod rt {
impl ToTokens for P<ast::Expr> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))]
+ let nt = token::NtExpr(self.clone());
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for P<ast::Pat> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))]
+ let nt = token::NtPat(self.clone());
+ vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::Arm {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
+ let nt = token::NtArm(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for ast::Arg {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
- vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArg(self.clone())))]
+ let nt = token::NtArg(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
impl ToTokens for P<ast::Block> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtBlock(self.clone())))]
+ let nt = token::NtBlock(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
@@ -204,7 +213,8 @@ pub mod rt {
impl ToTokens for P<ast::MetaItem> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
+ let nt = token::NtMeta(self.clone());
+ vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
}
}
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 7e3fe3285695..1066646aa8e8 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -89,7 +89,6 @@ use parse::token::{DocComment, MatchNt, SubstNt};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust;
-use ptr::P;
use tokenstream::{self, TokenTree};
use util::small_vector::SmallVector;
@@ -198,7 +197,7 @@ pub fn initial_matcher_pos(ms: Vec<TokenTree>, sep: Option<Token>, lo: BytePos)
pub enum NamedMatch {
MatchedSeq(Vec<Rc<NamedMatch>>, syntax_pos::Span),
- MatchedNonterminal(Nonterminal)
+ MatchedNonterminal(Rc<Nonterminal>)
}
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
@@ -279,17 +278,16 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
}
}
-pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
- let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(),
- None,
- rdr.peek().sp.lo));
+pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
+ let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
+ let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, parser.span.lo));
loop {
let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
let mut next_eis = Vec::new(); // or proceed normally
let mut eof_eis = Vec::new();
- let TokenAndSpan { tok, sp } = rdr.peek();
+ let (sp, tok) = (parser.span, parser.token.clone());
/* we append new items to this while we go */
loop {
@@ -474,23 +472,19 @@ pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedPars
while !next_eis.is_empty() {
cur_eis.push(next_eis.pop().unwrap());
}
- rdr.next_token();
+ parser.bump();
} else /* bb_eis.len() == 1 */ {
- rdr.next_tok = {
- let mut rust_parser = Parser::new(sess, Box::new(&mut rdr));
- let mut ei = bb_eis.pop().unwrap();
- if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
- let match_cur = ei.match_cur;
- (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
- parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
- ei.idx += 1;
- ei.match_cur += 1;
- } else {
- unreachable!()
- }
- cur_eis.push(ei);
- Some(TokenAndSpan { tok: rust_parser.token, sp: rust_parser.span })
- };
+ let mut ei = bb_eis.pop().unwrap();
+ if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
+ let match_cur = ei.match_cur;
+ (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
+ Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));
+ ei.idx += 1;
+ ei.match_cur += 1;
+ } else {
+ unreachable!()
+ }
+ cur_eis.push(ei);
}
}
@@ -502,10 +496,19 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
match name {
"tt" => {
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
- let res: ::parse::PResult<'a, _> = p.parse_token_tree();
- let res = token::NtTT(P(panictry!(res)));
+ let mut tt = panictry!(p.parse_token_tree());
p.quote_depth -= 1;
- return res;
+ loop {
+ let nt = match tt {
+ TokenTree::Token(_, token::Interpolated(ref nt)) => nt.clone(),
+ _ => break,
+ };
+ match *nt {
+ token::NtTT(ref sub_tt) => tt = sub_tt.clone(),
+ _ => break,
+ }
+ }
+ return token::NtTT(tt);
}
_ => {}
}
@@ -521,7 +524,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
},
"block" => token::NtBlock(panictry!(p.parse_block())),
"stmt" => match panictry!(p.parse_stmt()) {
- Some(s) => token::NtStmt(P(s)),
+ Some(s) => token::NtStmt(s),
None => {
p.fatal("expected a statement").emit();
panic!(FatalError);
@@ -534,7 +537,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"ident" => match p.token {
token::Ident(sn) => {
p.bump();
- token::NtIdent(Box::new(Spanned::{node: sn, span: p.span}))
+ token::NtIdent(Spanned::{node: sn, span: p.span})
}
_ => {
let token_str = pprust::token_to_string(&p.token);
@@ -544,7 +547,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
}
},
"path" => {
- token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type))))
+ token::NtPath(panictry!(p.parse_path(PathStyle::Type)))
},
"meta" => token::NtMeta(panictry!(p.parse_meta_item())),
// this is not supposed to happen, since it has been checked
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 431e757368c0..552d4de96174 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -236,12 +236,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
// Extract the arguments:
let lhses = match **argument_map.get(&lhs_nm).unwrap() {
MatchedSeq(ref s, _) => {
- s.iter().map(|m| match **m {
- MatchedNonterminal(NtTT(ref tt)) => {
- valid &= check_lhs_nt_follows(sess, tt);
- (**tt).clone()
+ s.iter().map(|m| {
+ if let MatchedNonterminal(ref nt) = **m {
+ if let NtTT(ref tt) = **nt {
+ valid &= check_lhs_nt_follows(sess, tt);
+ return (*tt).clone();
+ }
}
- _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+ sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
}).collect::>()
}
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
@@ -249,9 +251,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
let rhses = match **argument_map.get(&rhs_nm).unwrap() {
MatchedSeq(ref s, _) => {
- s.iter().map(|m| match **m {
- MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(),
- _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
+ s.iter().map(|m| {
+ if let MatchedNonterminal(ref nt) = **m {
+ if let NtTT(ref tt) = **nt {
+ return (*tt).clone();
+ }
+ }
+ sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
}).collect()
}
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 8a6a8e53a3e4..37e329e5d3b2 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -12,9 +12,7 @@ use self::LockstepIterSize::*;
use ast::Ident;
use errors::{Handler, DiagnosticBuilder};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, Interpolated, NtIdent, NtTT};
-use parse::token;
+use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
use parse::lexer::TokenAndSpan;
use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{self, TokenTree};
@@ -46,9 +44,7 @@ pub struct TtReader<'a> {
/* cached: */
pub cur_tok: Token,
pub cur_span: Span,
- pub next_tok: Option,
/// Transform doc comments. Only useful in macro invocations
- pub desugar_doc_comments: bool,
pub fatal_errs: Vec>,
}
@@ -59,20 +55,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
src: Vec)
-> TtReader {
- new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
-}
-
-/// The extra `desugar_doc_comments` flag enables reading doc comments
-/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
-///
-/// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
-/// (and should) be None.
-pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
- interp: Option>>,
- src: Vec,
- desugar_doc_comments: bool)
- -> TtReader {
let mut r = TtReader {
sp_diag: sp_diag,
stack: SmallVector::one(TtFrame {
@@ -91,11 +73,9 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
},
repeat_idx: Vec::new(),
repeat_len: Vec::new(),
- desugar_doc_comments: desugar_doc_comments,
/* dummy values, never read: */
cur_tok: token::Eof,
cur_span: DUMMY_SP,
- next_tok: None,
fatal_errs: Vec::new(),
};
tt_next_token(&mut r); /* get cur_tok and cur_span set up */
@@ -174,9 +154,6 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
/// Return the next token from the TtReader.
/// EFFECT: advances the reader's token field
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
- if let Some(tok) = r.next_tok.take() {
- return tok;
- }
// FIXME(pcwalton): Bad copy?
let ret_val = TokenAndSpan {
tok: r.cur_tok.clone(),
@@ -269,47 +246,35 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
// FIXME #2887: think about span stuff here
TokenTree::Token(sp, SubstNt(ident)) => {
+ r.stack.last_mut().unwrap().idx += 1;
match lookup_cur_matched(r, ident) {
None => {
- r.stack.last_mut().unwrap().idx += 1;
r.cur_span = sp;
r.cur_tok = SubstNt(ident);
return ret_val;
// this can't be 0 length, just like TokenTree::Delimited
}
- Some(cur_matched) => {
- match *cur_matched {
+ Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
+ match **nt {
// sidestep the interpolation tricks for ident because
// (a) idents can be in lots of places, so it'd be a pain
// (b) we actually can, since it's a token.
- MatchedNonterminal(NtIdent(ref sn)) => {
- r.stack.last_mut().unwrap().idx += 1;
+ NtIdent(ref sn) => {
r.cur_span = sn.span;
r.cur_tok = token::Ident(sn.node);
return ret_val;
}
- MatchedNonterminal(NtTT(ref tt)) => {
- r.stack.push(TtFrame {
- forest: TokenTree::Token(sp, Interpolated(NtTT(tt.clone()))),
- idx: 0,
- dotdotdoted: false,
- sep: None,
- });
- }
- MatchedNonterminal(ref other_whole_nt) => {
- r.stack.last_mut().unwrap().idx += 1;
+ _ => {
// FIXME(pcwalton): Bad copy.
r.cur_span = sp;
- r.cur_tok = Interpolated((*other_whole_nt).clone());
+ r.cur_tok = token::Interpolated(nt.clone());
return ret_val;
}
- MatchedSeq(..) => {
- panic!(r.sp_diag.span_fatal(
- sp, /* blame the macro writer */
- &format!("variable '{}' is still repeating at this depth",
- ident)));
- }
}
+ } else {
+ panic!(r.sp_diag.span_fatal(
+ sp, /* blame the macro writer */
+ &format!("variable '{}' is still repeating at this depth", ident)));
}
}
}
@@ -324,14 +289,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
});
// if this could be 0-length, we'd need to potentially recur here
}
- TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
- r.stack.push(TtFrame {
- forest: TokenTree::Token(sp, DocComment(name)),
- idx: 0,
- dotdotdoted: false,
- sep: None
- });
- }
TokenTree::Token(sp, tok) => {
r.cur_span = sp;
r.cur_tok = tok;
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 4bf6e55d6743..1deeaf422316 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -576,7 +576,13 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
match t {
token::Ident(id) => token::Ident(fld.fold_ident(id)),
token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
- token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
+ token::Interpolated(nt) => {
+ let nt = match Rc::try_unwrap(nt) {
+ Ok(nt) => nt,
+ Err(nt) => (*nt).clone(),
+ };
+ token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
+ }
token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
_ => t
@@ -614,26 +620,25 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
.expect_one("expected fold to produce exactly one item")),
token::NtBlock(block) => token::NtBlock(fld.fold_block(block)),
token::NtStmt(stmt) =>
- token::NtStmt(stmt.map(|stmt| fld.fold_stmt(stmt)
+ token::NtStmt(fld.fold_stmt(stmt)
// this is probably okay, because the only folds likely
// to peek inside interpolated nodes will be renamings/markings,
// which map single items to single items
- .expect_one("expected fold to produce exactly one statement"))),
+ .expect_one("expected fold to produce exactly one statement")),
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
- token::NtIdent(id) =>
- token::NtIdent(Box::new(Spanned::{node: fld.fold_ident(id.node), ..*id})),
+ token::NtIdent(id) => token::NtIdent(Spanned::{node: fld.fold_ident(id.node), ..id}),
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
- token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
- token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
+ token::NtPath(path) => token::NtPath(fld.fold_path(path)),
+ token::NtTT(tt) => token::NtTT(fld.fold_tt(&tt)),
token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
- token::NtImplItem(arm) =>
- token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
- .expect_one("expected fold to produce exactly one item"))),
- token::NtTraitItem(arm) =>
- token::NtTraitItem(arm.map(|arm| fld.fold_trait_item(arm)
- .expect_one("expected fold to produce exactly one item"))),
+ token::NtImplItem(item) =>
+ token::NtImplItem(fld.fold_impl_item(item)
+ .expect_one("expected fold to produce exactly one item")),
+ token::NtTraitItem(item) =>
+ token::NtTraitItem(fld.fold_trait_item(item)
+ .expect_one("expected fold to produce exactly one item")),
token::NtGenerics(generics) => token::NtGenerics(fld.fold_generics(generics)),
token::NtWhereClause(where_clause) =>
token::NtWhereClause(fld.fold_where_clause(where_clause)),
@@ -1244,36 +1249,22 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mut T) -> Expr {
folder.fold_ident(label.node)))
),
ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))),
- ExprKind::InlineAsm(InlineAsm {
- inputs,
- outputs,
- asm,
- asm_str_style,
- clobbers,
- volatile,
- alignstack,
- dialect,
- expn_id,
- }) => ExprKind::InlineAsm(InlineAsm {
- inputs: inputs.move_map(|(c, input)| {
- (c, folder.fold_expr(input))
- }),
- outputs: outputs.move_map(|out| {
- InlineAsmOutput {
- constraint: out.constraint,
- expr: folder.fold_expr(out.expr),
- is_rw: out.is_rw,
- is_indirect: out.is_indirect,
- }
- }),
- asm: asm,
- asm_str_style: asm_str_style,
- clobbers: clobbers,
- volatile: volatile,
- alignstack: alignstack,
- dialect: dialect,
- expn_id: expn_id,
- }),
+ ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| {
+ InlineAsm {
+ inputs: asm.inputs.move_map(|(c, input)| {
+ (c, folder.fold_expr(input))
+ }),
+ outputs: asm.outputs.move_map(|out| {
+ InlineAsmOutput {
+ constraint: out.constraint,
+ expr: folder.fold_expr(out.expr),
+ is_rw: out.is_rw,
+ is_indirect: out.is_indirect,
+ }
+ }),
+ ..asm
+ }
+ })),
ExprKind::Mac(mac) => ExprKind::Mac(folder.fold_mac(mac)),
ExprKind::Struct(path, fields, maybe_expr) => {
ExprKind::Struct(folder.fold_path(path),
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 3cb34fa3c91c..983c882eafca 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -215,7 +215,10 @@ impl<'a> Parser<'a> {
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
pub fn parse_meta_item(&mut self) -> PResult<'a, P<MetaItem>> {
let nt_meta = match self.token {
- token::Interpolated(token::NtMeta(ref e)) => Some(e.clone()),
+ token::Interpolated(ref nt) => match **nt {
+ token::NtMeta(ref e) => Some(e.clone()),
+ _ => None,
+ },
_ => None,
};
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 5e20f6e41927..cf48c445c80e 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -22,7 +22,7 @@ use std::char;
use std::mem::replace;
use std::rc::Rc;
-pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag};
+pub use ext::tt::transcribe::{TtReader, new_tt_reader};
pub mod comments;
mod unicode_chars;
@@ -171,31 +171,10 @@ impl<'a> Reader for TtReader<'a> {
self.fatal_errs.clear();
}
fn peek(&self) -> TokenAndSpan {
- self.next_tok.clone().unwrap_or(TokenAndSpan {
+ TokenAndSpan {
tok: self.cur_tok.clone(),
sp: self.cur_span,
- })
- }
-}
-
-impl<'a, 'b> Reader for &'b mut TtReader<'a> {
- fn is_eof(&self) -> bool {
- (**self).is_eof()
- }
- fn try_next_token(&mut self) -> Result {
- (**self).try_next_token()
- }
- fn fatal(&self, m: &str) -> FatalError {
- (**self).fatal(m)
- }
- fn err(&self, m: &str) {
- (**self).err(m)
- }
- fn emit_fatal_errors(&mut self) {
- (**self).emit_fatal_errors()
- }
- fn peek(&self) -> TokenAndSpan {
- (**self).peek()
+ }
}
}
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index b80aa667be6a..b670a7384739 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -107,125 +107,41 @@ pub enum SemiColonMode {
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
- ($p:expr) => (
- {
- let found = match $p.token {
- token::Interpolated(token::NtExpr(ref e)) => {
- Some((*e).clone())
+ ($p:expr) => {
+ if let token::Interpolated(nt) = $p.token.clone() {
+ match *nt {
+ token::NtExpr(ref e) => {
+ $p.bump();
+ return Ok((*e).clone());
}
- token::Interpolated(token::NtPath(_)) => {
- // FIXME: The following avoids an issue with lexical borrowck scopes,
- // but the clone is unfortunate.
- let pt = match $p.token {
- token::Interpolated(token::NtPath(ref pt)) => (**pt).clone(),
- _ => unreachable!()
- };
+ token::NtPath(ref path) => {
+ $p.bump();
let span = $p.span;
- Some($p.mk_expr(span.lo, span.hi, ExprKind::Path(None, pt), ThinVec::new()))
+ let kind = ExprKind::Path(None, (*path).clone());
+ return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new()));
}
- token::Interpolated(token::NtBlock(_)) => {
- // FIXME: The following avoids an issue with lexical borrowck scopes,
- // but the clone is unfortunate.
- let b = match $p.token {
- token::Interpolated(token::NtBlock(ref b)) => (*b).clone(),
- _ => unreachable!()
- };
+ token::NtBlock(ref block) => {
+ $p.bump();
let span = $p.span;
- Some($p.mk_expr(span.lo, span.hi, ExprKind::Block(b), ThinVec::new()))
+ let kind = ExprKind::Block((*block).clone());
+ return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new()));
}
- _ => None
+ _ => {},
};
- match found {
- Some(e) => {
- $p.bump();
- return Ok(e);
- }
- None => ()
- }
}
- )
+ }
}
/// As maybe_whole_expr, but for things other than expressions
macro_rules! maybe_whole {
- ($p:expr, $constructor:ident) => (
- {
- let found = match ($p).token {
- token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
- }
- _ => None
- };
- if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Ok(x.clone());
+ ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
+ if let token::Interpolated(nt) = $p.token.clone() {
+ if let token::$constructor($x) = (*nt).clone() {
+ $p.bump();
+ return Ok($e);
}
}
- );
- (no_clone $p:expr, $constructor:ident) => (
- {
- let found = match ($p).token {
- token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
- }
- _ => None
- };
- if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Ok(x);
- }
- }
- );
- (no_clone_from_p $p:expr, $constructor:ident) => (
- {
- let found = match ($p).token {
- token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
- }
- _ => None
- };
- if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Ok(x.unwrap());
- }
- }
- );
- (deref $p:expr, $constructor:ident) => (
- {
- let found = match ($p).token {
- token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
- }
- _ => None
- };
- if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Ok((*x).clone());
- }
- }
- );
- (Some deref $p:expr, $constructor:ident) => (
- {
- let found = match ($p).token {
- token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
- }
- _ => None
- };
- if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Ok(Some((*x).clone()));
- }
- }
- );
- (pair_empty $p:expr, $constructor:ident) => (
- {
- let found = match ($p).token {
- token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
- }
- _ => None
- };
- if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Ok((Vec::new(), x));
- }
- }
- )
+ };
}
fn maybe_append(mut lhs: Vec<Attribute>, rhs: Option<Vec<Attribute>>)
@@ -294,6 +210,9 @@ pub struct Parser<'a> {
/// into modules, and sub-parsers have new values for this name.
pub root_module_name: Option,
pub expected_tokens: Vec,
+ pub tts: Vec<(TokenTree, usize)>,
+ pub desugar_doc_comments: bool,
+ pub allow_interpolated_tts: bool,
}
#[derive(PartialEq, Eq, Clone)]
@@ -357,21 +276,18 @@ impl From> for LhsExpr {
}
impl<'a> Parser<'a> {
- pub fn new(sess: &'a ParseSess, mut rdr: Box) -> Self {
- let tok0 = rdr.real_token();
- let span = tok0.sp;
- let mut directory = match span {
- syntax_pos::DUMMY_SP => PathBuf::new(),
- _ => PathBuf::from(sess.codemap().span_to_filename(span)),
- };
- directory.pop();
+ pub fn new(sess: &'a ParseSess, rdr: Box) -> Self {
+ Parser::new_with_doc_flag(sess, rdr, false)
+ }
- Parser {
+ pub fn new_with_doc_flag(sess: &'a ParseSess, rdr: Box, desugar_doc_comments: bool)
+ -> Self {
+ let mut parser = Parser {
reader: rdr,
sess: sess,
- token: tok0.tok,
- span: span,
- prev_span: span,
+ token: token::Underscore,
+ span: syntax_pos::DUMMY_SP,
+ prev_span: syntax_pos::DUMMY_SP,
prev_token_kind: PrevTokenKind::Other,
lookahead_buffer: Default::default(),
tokens_consumed: 0,
@@ -379,11 +295,63 @@ impl<'a> Parser<'a> {
quote_depth: 0,
parsing_token_tree: false,
obsolete_set: HashSet::new(),
- directory: directory,
+ directory: PathBuf::new(),
open_braces: Vec::new(),
owns_directory: true,
root_module_name: None,
expected_tokens: Vec::new(),
+ tts: Vec::new(),
+ desugar_doc_comments: desugar_doc_comments,
+ allow_interpolated_tts: true,
+ };
+
+ let tok = parser.next_tok();
+ parser.token = tok.tok;
+ parser.span = tok.sp;
+ if parser.span != syntax_pos::DUMMY_SP {
+ parser.directory = PathBuf::from(sess.codemap().span_to_filename(parser.span));
+ parser.directory.pop();
+ }
+ parser
+ }
+
+ fn next_tok(&mut self) -> TokenAndSpan {
+ 'outer: loop {
+ let mut tok = if let Some((tts, i)) = self.tts.pop() {
+ let tt = tts.get_tt(i);
+ if i + 1 < tts.len() {
+ self.tts.push((tts, i + 1));
+ }
+ if let TokenTree::Token(sp, tok) = tt {
+ TokenAndSpan { tok: tok, sp: sp }
+ } else {
+ self.tts.push((tt, 0));
+ continue
+ }
+ } else {
+ self.reader.real_token()
+ };
+
+ loop {
+ let nt = match tok.tok {
+ token::Interpolated(ref nt) => nt.clone(),
+ token::DocComment(name) if self.desugar_doc_comments => {
+ self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
+ continue 'outer
+ }
+ _ => return tok,
+ };
+ match *nt {
+ token::NtTT(TokenTree::Token(sp, ref t)) => {
+ tok = TokenAndSpan { tok: t.clone(), sp: sp };
+ }
+ token::NtTT(ref tt) => {
+ self.tts.push((tt.clone(), 0));
+ continue 'outer
+ }
+ _ => return tok,
+ }
+ }
}
}
@@ -516,9 +484,6 @@ impl<'a> Parser<'a> {
self.bump();
Ok(i)
}
- token::Interpolated(token::NtIdent(..)) => {
- self.bug("ident interpolation not converted to real token");
- }
_ => {
Err(if self.prev_token_kind == PrevTokenKind::DocComment {
self.span_fatal_help(self.prev_span,
@@ -935,7 +900,7 @@ impl<'a> Parser<'a> {
};
let next = if self.lookahead_buffer.start == self.lookahead_buffer.end {
- self.reader.real_token()
+ self.next_tok()
} else {
// Avoid token copies with `replace`.
let old_start = self.lookahead_buffer.start;
@@ -980,7 +945,7 @@ impl<'a> Parser<'a> {
f(&self.token)
} else if dist < LOOKAHEAD_BUFFER_CAPACITY {
while self.lookahead_buffer.len() < dist {
- self.lookahead_buffer.buffer[self.lookahead_buffer.end] = self.reader.real_token();
+ self.lookahead_buffer.buffer[self.lookahead_buffer.end] = self.next_tok();
self.lookahead_buffer.end =
(self.lookahead_buffer.end + 1) % LOOKAHEAD_BUFFER_CAPACITY;
}
@@ -1162,7 +1127,7 @@ impl<'a> Parser<'a> {
/// Parse the items in a trait declaration
pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> {
- maybe_whole!(no_clone_from_p self, NtTraitItem);
+ maybe_whole!(self, NtTraitItem, |x| x);
let mut attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
@@ -1331,7 +1296,7 @@ impl<'a> Parser<'a> {
/// Parse a type.
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
- maybe_whole!(no_clone self, NtTy);
+ maybe_whole!(self, NtTy, |x| x);
let lo = self.span.lo;
@@ -1476,7 +1441,7 @@ impl<'a> Parser<'a> {
/// This version of parse arg doesn't necessarily require
/// identifier names.
pub fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
- maybe_whole!(no_clone self, NtArg);
+ maybe_whole!(self, NtArg, |x| x);
let pat = if require_name || self.is_named_argument() {
debug!("parse_arg_general parse_pat (require_name:{})",
@@ -1542,12 +1507,13 @@ impl<'a> Parser<'a> {
/// Matches token_lit = LIT_INTEGER | ...
pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
let out = match self.token {
- token::Interpolated(token::NtExpr(ref v)) => {
- match v.node {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtExpr(ref v) => match v.node {
ExprKind::Lit(ref lit) => { lit.node.clone() }
_ => { return self.unexpected_last(&self.token); }
- }
- }
+ },
+ _ => { return self.unexpected_last(&self.token); }
+ },
token::Literal(lit, suf) => {
let (suffix_illegal, out) = match lit {
token::Byte(i) => (true, LitKind::Byte(parse::byte_lit(&i.as_str()).0)),
@@ -1703,14 +1669,7 @@ impl<'a> Parser<'a> {
/// bounds are permitted and whether `::` must precede type parameter
/// groups.
pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> {
- // Check for a whole path...
- let found = match self.token {
- token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()),
- _ => None,
- };
- if let Some(token::Interpolated(token::NtPath(path))) = found {
- return Ok(*path);
- }
+ maybe_whole!(self, NtPath, |x| x);
let lo = self.span.lo;
let is_global = self.eat(&token::ModSep);
@@ -2746,8 +2705,6 @@ impl<'a> Parser<'a> {
// and token::SubstNt's; it's too early to know yet
// whether something will be a nonterminal or a seq
// yet.
- maybe_whole!(deref self, NtTT);
-
match self.token {
token::Eof => {
let mut err: DiagnosticBuilder<'a> =
@@ -2760,6 +2717,17 @@ impl<'a> Parser<'a> {
Err(err)
},
token::OpenDelim(delim) => {
+ if self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
+ let tt = self.tts.pop().unwrap().0;
+ self.bump();
+ return Ok(if self.allow_interpolated_tts {
+ // avoid needlessly reparsing token trees in recursive macro expansions
+ TokenTree::Token(tt.span(), token::Interpolated(Rc::new(token::NtTT(tt))))
+ } else {
+ tt
+ });
+ }
+
let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
// The span for beginning of the delimited section
let pre_span = self.span;
@@ -2833,29 +2801,20 @@ impl<'a> Parser<'a> {
close_span: close_span,
})))
},
+ token::CloseDelim(_) => {
+ // An unexpected closing delimiter (i.e., there is no
+ // matching opening delimiter).
+ let token_str = self.this_token_to_string();
+ let err = self.diagnostic().struct_span_err(self.span,
+ &format!("unexpected close delimiter: `{}`", token_str));
+ Err(err)
+ },
+ /* we ought to allow different depths of unquotation */
+ token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => {
+ self.parse_unquoted()
+ }
_ => {
- // invariants: the current token is not a left-delimiter,
- // not an EOF, and not the desired right-delimiter (if
- // it were, parse_seq_to_before_end would have prevented
- // reaching this point).
- maybe_whole!(deref self, NtTT);
- match self.token {
- token::CloseDelim(_) => {
- // An unexpected closing delimiter (i.e., there is no
- // matching opening delimiter).
- let token_str = self.this_token_to_string();
- let err = self.diagnostic().struct_span_err(self.span,
- &format!("unexpected close delimiter: `{}`", token_str));
- Err(err)
- },
- /* we ought to allow different depths of unquotation */
- token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => {
- self.parse_unquoted()
- }
- _ => {
- Ok(TokenTree::Token(self.span, self.bump_and_get()))
- }
- }
+ Ok(TokenTree::Token(self.span, self.bump_and_get()))
}
}
}
@@ -3336,7 +3295,7 @@ impl<'a> Parser<'a> {
}
pub fn parse_arm(&mut self) -> PResult<'a, Arm> {
- maybe_whole!(no_clone self, NtArm);
+ maybe_whole!(self, NtArm, |x| x);
let attrs = self.parse_outer_attributes()?;
let pats = self.parse_pats()?;
@@ -3592,7 +3551,7 @@ impl<'a> Parser<'a> {
/// Parse a pattern.
pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> {
- maybe_whole!(self, NtPat);
+ maybe_whole!(self, NtPat, |x| x);
let lo = self.span.lo;
let pat;
@@ -3897,7 +3856,7 @@ impl<'a> Parser<'a> {
fn parse_stmt_without_recovery(&mut self,
macro_legacy_warnings: bool)
-> PResult<'a, Option<Stmt>> {
- maybe_whole!(Some deref self, NtStmt);
+ maybe_whole!(self, NtStmt, |x| Some(x));
let attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
@@ -4086,7 +4045,7 @@ impl<'a> Parser<'a> {
/// Parse a block. No inner attrs are allowed.
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
- maybe_whole!(no_clone self, NtBlock);
+ maybe_whole!(self, NtBlock, |x| x);
let lo = self.span.lo;
@@ -4124,7 +4083,7 @@ impl<'a> Parser<'a> {
/// Parse a block. Inner attrs are allowed.
fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
- maybe_whole!(pair_empty self, NtBlock);
+ maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
let lo = self.span.lo;
self.expect(&token::OpenDelim(token::Brace))?;
@@ -4299,7 +4258,7 @@ impl<'a> Parser<'a> {
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
pub fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
- maybe_whole!(self, NtGenerics);
+ maybe_whole!(self, NtGenerics, |x| x);
let span_lo = self.span.lo;
if self.eat(&token::Lt) {
@@ -4440,7 +4399,7 @@ impl<'a> Parser<'a> {
/// where T : Trait + 'b, 'a : 'b
/// ```
pub fn parse_where_clause(&mut self) -> PResult<'a, ast::WhereClause> {
- maybe_whole!(self, NtWhereClause);
+ maybe_whole!(self, NtWhereClause, |x| x);
let mut where_clause = WhereClause {
id: ast::DUMMY_NODE_ID,
@@ -4848,7 +4807,7 @@ impl<'a> Parser<'a> {
/// Parse an impl item.
pub fn parse_impl_item(&mut self) -> PResult<'a, ImplItem> {
- maybe_whole!(no_clone_from_p self, NtImplItem);
+ maybe_whole!(self, NtImplItem, |x| x);
let mut attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
@@ -5716,19 +5675,13 @@ impl<'a> Parser<'a> {
/// extern crate.
fn parse_item_(&mut self, attrs: Vec<Attribute>,
macros_allowed: bool, attributes_allowed: bool) -> PResult<'a, Option<P<Item>>> {
- let nt_item = match self.token {
- token::Interpolated(token::NtItem(ref item)) => {
- Some((**item).clone())
- }
- _ => None
- };
- if let Some(mut item) = nt_item {
- self.bump();
+ maybe_whole!(self, NtItem, |item| {
+ let mut item = item.unwrap();
let mut attrs = attrs;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs);
- return Ok(Some(P(item)));
- }
+ Some(P(item))
+ });
let lo = self.span.lo;
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 4d0da660302a..0198ee073d23 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -123,7 +123,7 @@ pub enum Token {
Lifetime(ast::Ident),
/* For interpolation */
- Interpolated(Nonterminal),
+ Interpolated(Rc<Nonterminal>),
// Can be expanded into several tokens.
/// Doc comment
DocComment(ast::Name),
@@ -172,12 +172,15 @@ impl Token {
DotDot | DotDotDot => true, // range notation
Lt | BinOp(Shl) => true, // associated path
ModSep => true,
- Interpolated(NtExpr(..)) => true,
- Interpolated(NtIdent(..)) => true,
- Interpolated(NtBlock(..)) => true,
- Interpolated(NtPath(..)) => true,
Pound => true, // for expression attributes
- _ => false,
+ Interpolated(ref nt) => match **nt {
+ NtExpr(..) => true,
+ NtIdent(..) => true,
+ NtBlock(..) => true,
+ NtPath(..) => true,
+ _ => false,
+ },
+ _ => false,
}
}
@@ -215,10 +218,12 @@ impl Token {
/// Returns `true` if the token is an interpolated path.
pub fn is_path(&self) -> bool {
- match *self {
- Interpolated(NtPath(..)) => true,
- _ => false,
+ if let Interpolated(ref nt) = *self {
+ if let NtPath(..) = **nt {
+ return true;
+ }
}
+ false
}
/// Returns `true` if the token is a lifetime.
@@ -290,19 +295,19 @@ impl Token {
pub enum Nonterminal {
NtItem(P<ast::Item>),
NtBlock(P<ast::Block>),
- NtStmt(P<ast::Stmt>),
+ NtStmt(ast::Stmt),
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
- NtIdent(Box<ast::SpannedIdent>),
+ NtIdent(ast::SpannedIdent),
/// Stuff inside brackets for attributes
NtMeta(P<ast::MetaItem>),
- NtPath(Box<ast::Path>),
- NtTT(P<tokenstream::TokenTree>), // needs P'ed to break a circularity
+ NtPath(ast::Path),
+ NtTT(tokenstream::TokenTree),
// These are not exposed to macros, but are used by quasiquote.
NtArm(ast::Arm),
- NtImplItem(P<ast::ImplItem>),
- NtTraitItem(P<ast::TraitItem>),
+ NtImplItem(ast::ImplItem),
+ NtTraitItem(ast::TraitItem),
NtGenerics(ast::Generics),
NtWhereClause(ast::WhereClause),
NtArg(ast::Arg),
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index b0bd64467430..7352792a8a25 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -285,7 +285,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::Comment => "/* */".to_string(),
token::Shebang(s) => format!("/* shebang: {}*/", s),
- token::Interpolated(ref nt) => match *nt {
+ token::Interpolated(ref nt) => match **nt {
token::NtExpr(ref e) => expr_to_string(&e),
token::NtMeta(ref e) => meta_item_to_string(&e),
token::NtTy(ref e) => ty_to_string(&e),
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 9e644e59e86a..9ef6c07e489d 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -134,8 +134,10 @@ impl TokenTree {
AttrStyle::Inner => 3,
}
}
+ TokenTree::Token(_, token::Interpolated(ref nt)) => {
+ if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 }
+ },
TokenTree::Token(_, token::MatchNt(..)) => 3,
- TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(..))) => 1,
TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
TokenTree::Sequence(_, ref seq) => seq.tts.len(),
TokenTree::Token(..) => 0,
@@ -193,9 +195,6 @@ impl TokenTree {
TokenTree::Token(sp, token::Ident(kind))];
v[index].clone()
}
- (&TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(ref tt))), _) => {
- tt.clone().unwrap()
- }
(&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
_ => panic!("Cannot expand a token tree"),
}
@@ -215,11 +214,9 @@ impl TokenTree {
mtch: &[TokenTree],
tts: &[TokenTree])
-> macro_parser::NamedParseResult {
+ let diag = &cx.parse_sess().span_diagnostic;
// `None` is because we're not interpolating
- let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
- None,
- tts.iter().cloned().collect(),
- true);
+ let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
macro_parser::parse(cx.parse_sess(), arg_rdr, mtch)
}
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs
index 24c515e50280..e4d0cb740460 100644
--- a/src/libsyntax_ext/asm.rs
+++ b/src/libsyntax_ext/asm.rs
@@ -250,7 +250,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
MacEager::expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
- node: ast::ExprKind::InlineAsm(ast::InlineAsm {
+ node: ast::ExprKind::InlineAsm(P(ast::InlineAsm {
asm: token::intern_and_get_ident(&asm),
asm_str_style: asm_str_style.unwrap(),
outputs: outputs,
@@ -260,7 +260,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
alignstack: alignstack,
dialect: dialect,
expn_id: expn_id,
- }),
+ })),
span: sp,
attrs: ast::ThinVec::new(),
}))
diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
index 5229d42f1fdd..6ac0d5ad1a3b 100644
--- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
@@ -18,10 +18,10 @@ extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
-use syntax::parse::token::{self, str_to_ident, NtExpr, NtPat};
+use syntax::parse::token::{str_to_ident, NtExpr, NtPat};
use syntax::ast::{Pat};
use syntax::tokenstream::{TokenTree};
-use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
+use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
use syntax::ext::build::AstBuilder;
use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use syntax::ext::tt::macro_parser::{Success, Failure, Error};
@@ -30,35 +30,12 @@ use syntax::ptr::P;
use syntax_pos::Span;
use rustc_plugin::Registry;
-fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
+fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
-> Box<MacResult + 'static> {
let mbe_matcher = quote_matcher!(cx, $matched:expr, $($pat:pat)|+);
-
- let mac_expr = match TokenTree::parse(cx, &mbe_matcher[..], args) {
- Success(map) => {
- match (&*map[&str_to_ident("matched")], &*map[&str_to_ident("pat")]) {
- (&MatchedNonterminal(NtExpr(ref matched_expr)),
- &MatchedSeq(ref pats, seq_sp)) => {
- let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt|
- if let &MatchedNonterminal(NtPat(ref pat)) = &**pat_nt {
- pat.clone()
- } else {
- unreachable!()
- }
- ).collect();
- let arm = cx.arm(seq_sp, pats, cx.expr_bool(seq_sp, true));
-
- quote_expr!(cx,
- match $matched_expr {
- $arm
- _ => false
- }
- )
- }
- _ => unreachable!()
- }
- }
+ let map = match TokenTree::parse(cx, &mbe_matcher, args) {
+ Success(map) => map,
Failure(_, tok) => {
panic!("expected Success, but got Failure: {}", parse_failure_msg(tok));
}
@@ -67,6 +44,34 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
}
};
+ let matched_nt = match *map[&str_to_ident("matched")] {
+ MatchedNonterminal(ref nt) => nt.clone(),
+ _ => unreachable!(),
+ };
+
+ let mac_expr = match (&*matched_nt, &*map[&str_to_ident("pat")]) {
+ (&NtExpr(ref matched_expr), &MatchedSeq(ref pats, seq_sp)) => {
+ let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt| {
+ match **pat_nt {
+ MatchedNonterminal(ref nt) => match **nt {
+ NtPat(ref pat) => pat.clone(),
+ _ => unreachable!(),
+ },
+ _ => unreachable!(),
+ }
+ }).collect();
+ let arm = cx.arm(seq_sp, pats, cx.expr_bool(seq_sp, true));
+
+ quote_expr!(cx,
+ match $matched_expr {
+ $arm
+ _ => false
+ }
+ )
+ }
+ _ => unreachable!()
+ };
+
MacEager::expr(mac_expr)
}