[WIP] Macro rules backtracking #33840

Closed
4 changes: 2 additions & 2 deletions src/libsyntax/ast.rs
@@ -1276,8 +1276,8 @@ impl TokenTree {
}

/// Use this token tree as a matcher to parse given tts.
pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
-> macro_parser::NamedParseResult {
pub fn parse<'a>(cx: &'a base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
-> macro_parser::NamedParseResult<'a> {
// `None` is because we're not interpolating
let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
None,
4 changes: 4 additions & 0 deletions src/libsyntax/codemap.rs
@@ -330,6 +330,10 @@ impl MultiSpan {
&self.primary_spans
}

pub fn primary_spans_mut(&mut self) -> &mut [Span] {
&mut self.primary_spans
}

/// Returns the strings to highlight. We always ensure that there
/// is an entry for each of the primary spans -- for each primary
/// span P, if there is at least one label with span P, we return
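The new `primary_spans_mut` accessor exists so that a diagnostic saved for later (see `generic_extension` in macro_rules.rs below) can have its placeholder spans rewritten before it is emitted. A minimal, self-contained sketch of that idea — `Span`, `MultiSpan`, and `substitute_dummy` here are simplified stand-ins, not the real codemap types:

```rust
// Minimal stand-ins for the codemap types, showing why a mutable view of the
// primary spans is useful: a saved diagnostic built against a dummy span can
// be re-pointed at the macro invocation site before it is emitted.

#[derive(Clone, Copy, PartialEq, Debug)]
struct Span { lo: u32, hi: u32 }

const DUMMY_SP: Span = Span { lo: 0, hi: 0 };

impl Span {
    // Same idea as `Span::substitute_dummy`: keep a real span, replace a dummy one.
    fn substitute_dummy(self, fallback: Span) -> Span {
        if self == DUMMY_SP { fallback } else { self }
    }
}

struct MultiSpan { primary_spans: Vec<Span> }

impl MultiSpan {
    fn primary_spans(&self) -> &[Span] { &self.primary_spans }
    // Modeled after the accessor added in this diff.
    fn primary_spans_mut(&mut self) -> &mut [Span] { &mut self.primary_spans }
}

fn main() {
    let invocation_site = Span { lo: 10, hi: 20 };
    let mut sp = MultiSpan { primary_spans: vec![DUMMY_SP, Span { lo: 3, hi: 5 }] };

    // Re-point dummy primary spans at the invocation site, as the saved
    // diagnostics in macro_rules.rs are re-pointed before being emitted.
    for span in sp.primary_spans_mut() {
        *span = span.substitute_dummy(invocation_site);
    }

    assert_eq!(sp.primary_spans(), &[invocation_site, Span { lo: 3, hi: 5 }]);
}
```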
23 changes: 9 additions & 14 deletions src/libsyntax/errors/mod.rs
@@ -184,7 +184,7 @@ pub struct DiagnosticBuilder<'a> {
level: Level,
message: String,
code: Option<String>,
span: MultiSpan,
pub span: MultiSpan,
children: Vec<SubDiagnostic>,
}

@@ -302,11 +302,6 @@ impl<'a> DiagnosticBuilder<'a> {
self
}

pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self {
self.span = sp.into();
self
}

pub fn code(&mut self, s: String) -> &mut Self {
self.code = Some(s);
self
@@ -421,7 +416,7 @@ impl Handler {
msg: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
result.set_span(sp);
result.span = sp.into();
if !self.can_emit_warnings {
result.cancel();
}
@@ -433,7 +428,7 @@ impl Handler {
code: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
result.set_span(sp);
result.span = sp.into();
result.code(code.to_owned());
if !self.can_emit_warnings {
result.cancel();
@@ -453,7 +448,7 @@
-> DiagnosticBuilder<'a> {
self.bump_err_count();
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result.span = sp.into();
result
}
pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self,
@@ -463,7 +458,7 @@
-> DiagnosticBuilder<'a> {
self.bump_err_count();
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result.span = sp.into();
result.code(code.to_owned());
result
}
@@ -477,7 +472,7 @@
-> DiagnosticBuilder<'a> {
self.bump_err_count();
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
result.set_span(sp);
result.span = sp.into();
result
}
pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self,
@@ -487,7 +482,7 @@
-> DiagnosticBuilder<'a> {
self.bump_err_count();
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
result.set_span(sp);
result.span = sp.into();
result.code(code.to_owned());
result
}
@@ -496,10 +491,10 @@
DiagnosticBuilder::new(self, Level::Fatal, msg)
}

pub fn cancel(&mut self, err: &mut DiagnosticBuilder) {
pub fn cancel(&self, err: &mut DiagnosticBuilder) {
if err.level == Level::Error || err.level == Level::Fatal {
assert!(self.has_errors());
self.err_count.set(self.err_count.get() + 1);
self.err_count.set(self.err_count.get() - 1);
}
err.cancel();
}
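Two things change in `Handler::cancel` above: it now takes `&self` (the count lives in a `Cell`, so interior mutability suffices), and it decrements the error count instead of incrementing it, undoing the bump performed when the error-level builder was created. A small self-contained model of that bookkeeping, using simplified `Handler`/`Diag` stand-ins rather than the real libsyntax types:

```rust
use std::cell::Cell;

// Building an error-level diagnostic bumps the count, so cancelling one that
// is never emitted must decrement it again -- otherwise later "abort if
// errors" checks would fire for phantom errors.

#[derive(PartialEq)]
enum Level { Error, Warning }

struct Diag { level: Level, cancelled: bool }

struct Handler { err_count: Cell<usize> }

impl Handler {
    fn struct_err(&self) -> Diag {
        self.err_count.set(self.err_count.get() + 1);
        Diag { level: Level::Error, cancelled: false }
    }

    fn struct_warn(&self) -> Diag {
        Diag { level: Level::Warning, cancelled: false }
    }

    // `&self` is enough, as in the diff: the count lives in a `Cell`.
    fn cancel(&self, d: &mut Diag) {
        if d.level == Level::Error {
            assert!(self.err_count.get() > 0);
            self.err_count.set(self.err_count.get() - 1); // decrement, not increment
        }
        d.cancelled = true;
    }
}

fn main() {
    let handler = Handler { err_count: Cell::new(0) };
    let mut err = handler.struct_err();
    assert_eq!(handler.err_count.get(), 1);

    // Deciding not to report it: the count must return to zero.
    handler.cancel(&mut err);
    assert_eq!(handler.err_count.get(), 0);
    assert!(err.cancelled);

    // Cancelling a warning leaves the error count alone.
    let mut warn = handler.struct_warn();
    handler.cancel(&mut warn);
    assert_eq!(handler.err_count.get(), 0);
}
```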
88 changes: 43 additions & 45 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -82,9 +82,9 @@ use ast;
use ast::{TokenTree, Name, Ident};
use codemap::{BytePos, mk_sp, Span, Spanned};
use codemap;
use errors::FatalError;
use errors::DiagnosticBuilder;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
use parse::{ParseSess, PResult};
use parse::parser::{PathStyle, Parser};
use parse::token::{DocComment, MatchNt, SubstNt};
use parse::token::{Token, Nonterminal};
@@ -200,8 +200,8 @@ pub enum NamedMatch {
MatchedNonterminal(Nonterminal)
}

pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> ParseResult<HashMap<Name, Rc<NamedMatch>>> {
pub fn nameize<'a>(p_s: &'a ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> ParseResult<'a, HashMap<Name, Rc<NamedMatch>>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize)
-> Result<(), (codemap::Span, String)> {
@@ -248,16 +248,16 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
Success(ret_val)
}

pub enum ParseResult<T> {
pub enum ParseResult<'a, T> {
Success(T),
/// Arm failed to match
Failure(codemap::Span, String),
Failure(DiagnosticBuilder<'a>),
/// Fatal error (malformed macro?). Abort compilation.
Error(codemap::Span, String)
}

pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
pub type NamedParseResult<'a> = ParseResult<'a, HashMap<Name, Rc<NamedMatch>>>;
pub type PositionalParseResult<'a> = ParseResult<'a, Vec<Rc<NamedMatch>>>;

/// Perform a token equality check, ignoring syntax context (that is, an
/// unhygienic comparison)
@@ -270,11 +270,11 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
}
}

pub fn parse(sess: &ParseSess,
cfg: ast::CrateConfig,
mut rdr: TtReader,
ms: &[TokenTree])
-> NamedParseResult {
pub fn parse<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
mut rdr: TtReader<'a>,
ms: &[TokenTree])
-> NamedParseResult<'a> {
let mut cur_eis = Vec::new();
cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
.cloned()
@@ -445,7 +445,9 @@ pub fn parse(sess: &ParseSess,
} else if eof_eis.len() > 1 {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
return Failure(sp, "unexpected end of macro invocation".to_string());
return Failure(sess.span_diagnostic.struct_span_err(
sp, "unexpected end of macro invocation"
));
}
} else {
if (!bb_eis.is_empty() && !next_eis.is_empty())
@@ -466,8 +468,10 @@
}
))
} else if bb_eis.is_empty() && next_eis.is_empty() {
return Failure(sp, format!("no rules expected the token `{}`",
pprust::token_to_string(&tok)));
return Failure(sess.span_diagnostic.struct_span_err(
sp, &format!("no rules expected the token `{}`",
pprust::token_to_string(&tok))
));
} else if !next_eis.is_empty() {
/* Now process the next token */
while !next_eis.is_empty() {
@@ -481,8 +485,12 @@
match ei.top_elts.get_tt(ei.idx) {
TokenTree::Token(span, MatchNt(_, ident)) => {
let match_cur = ei.match_cur;
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
let nt = match parse_nt(&mut rust_parser, span,
&ident.name.as_str()) {
Ok(nt) => Rc::new(MatchedNonterminal(nt)),
Err(diag) => return Failure(diag)
Inline review comment (contributor, author): Note to myself: we must not fail here, but rather just remove the current parsing item from the stack. (A rough sketch of that idea follows this hunk.)
};
(&mut ei.matches[match_cur]).push(nt);
ei.idx += 1;
ei.match_cur += 1;
}
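The inline note above suggests that a failed nonterminal parse should not abort the whole match; instead, the diagnostic should be cancelled and the current matcher position dropped so the remaining positions keep going. A rough sketch of that alternative, using simplified stand-ins for `MatcherPos`, the parser, and the diagnostic builder rather than the real libsyntax types:

```rust
// When a black-box nonterminal fails to parse, cancel its diagnostic and drop
// that matcher position instead of failing the whole macro match.

struct MatcherPos { idx: usize }

struct Diag { msg: String }

impl Diag {
    // In the real code this would go through `Handler::cancel` so the
    // error count stays balanced.
    fn cancel(self) { let _ = self.msg; }
}

fn parse_nt_stub(pos: &MatcherPos) -> Result<String, Diag> {
    // Pretend only even positions contain something parseable.
    if pos.idx % 2 == 0 {
        Ok(format!("nonterminal at {}", pos.idx))
    } else {
        Err(Diag { msg: format!("no nonterminal at {}", pos.idx) })
    }
}

fn main() {
    let mut bb_eis = vec![MatcherPos { idx: 0 }, MatcherPos { idx: 1 }, MatcherPos { idx: 2 }];
    let mut next_eis = Vec::new();

    while let Some(mut ei) = bb_eis.pop() {
        match parse_nt_stub(&ei) {
            Ok(_nt) => {
                // Record the match and keep this position alive.
                ei.idx += 1;
                next_eis.push(ei);
            }
            Err(diag) => {
                // Instead of `return Failure(diag)`: cancel the diagnostic
                // and drop `ei`, letting the other positions carry on.
                diag.cancel();
            }
        }
    }

    // Two of the three positions survive; the match as a whole is not aborted.
    assert_eq!(next_eis.len(), 2);
}
```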
@@ -500,55 +508,45 @@
}
}

pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> PResult<'a, Nonterminal> {
match name {
"tt" => {
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
let res: ::parse::PResult<'a, _> = p.parse_token_tree();
let res = token::NtTT(P(panictry!(res)));
let res = token::NtTT(P(res?));
p.quote_depth -= 1;
return res;
return Ok(res);
}
_ => {}
}
// check at the beginning and the parser checks after each bump
p.check_unknown_macro_variable();
match name {
"item" => match panictry!(p.parse_item()) {
Some(i) => token::NtItem(i),
None => {
p.fatal("expected an item keyword").emit();
panic!(FatalError);
}
"item" => match p.parse_item()? {
Some(i) => Ok(token::NtItem(i)),
None => Err(p.fatal("expected an item keyword"))
},
"block" => token::NtBlock(panictry!(p.parse_block())),
"stmt" => match panictry!(p.parse_stmt()) {
Some(s) => token::NtStmt(P(s)),
None => {
p.fatal("expected a statement").emit();
panic!(FatalError);
}
"block" => Ok(token::NtBlock(p.parse_block()?)),
"stmt" => match p.parse_stmt()? {
Some(s) => Ok(token::NtStmt(P(s))),
None => Err(p.fatal("expected a statement"))
},
"pat" => token::NtPat(panictry!(p.parse_pat())),
"expr" => token::NtExpr(panictry!(p.parse_expr())),
"ty" => token::NtTy(panictry!(p.parse_ty())),
"pat" => Ok(token::NtPat(p.parse_pat()?)),
"expr" => Ok(token::NtExpr(p.parse_expr()?)),
"ty" => Ok(token::NtTy(p.parse_ty()?)),
// this could be handled like a token, since it is one
"ident" => match p.token {
token::Ident(sn) => {
p.bump();
token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
Ok(token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span})))
}
_ => {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}",
&token_str[..])).emit();
panic!(FatalError)
Err(p.fatal(&format!("expected ident, found {}", &token_str[..])))
}
},
"path" => {
token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type))))
},
"meta" => token::NtMeta(panictry!(p.parse_meta_item())),
"path" => Ok(token::NtPath(Box::new(p.parse_path(PathStyle::Type)?))),
"meta" => Ok(token::NtMeta(p.parse_meta_item()?)),
// this is not supposed to happen, since it has been checked
// when compiling the macro.
_ => p.span_bug(sp, "invalid fragment specifier")
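The `parse_nt` rewrite above follows one pattern throughout: each `panictry!(...)` becomes `...?`, and each `emit()` followed by `panic!(FatalError)` becomes `Err(builder)`, so the caller decides whether to abort or to backtrack. A minimal illustration of that shape, with hypothetical stand-in types rather than the real `Parser`/`PResult`:

```rust
// The helper no longer emits and panics; it hands the un-emitted error back
// to the caller, which can abort or try another macro arm.

struct Diag { msg: String }
struct Block;
struct Parser { has_block: bool }

type PResult<T> = Result<T, Diag>;

impl Parser {
    fn parse_block(&mut self) -> PResult<Block> {
        if self.has_block { Ok(Block) } else { Err(self.fatal("expected `{`")) }
    }
    fn fatal(&self, msg: &str) -> Diag {
        Diag { msg: msg.to_string() }
    }
}

// Before (conceptually): token::NtBlock(panictry!(p.parse_block()))
// After: propagate with `?` and let the caller decide.
fn parse_nt_block(p: &mut Parser) -> PResult<Block> {
    Ok(p.parse_block()?)
}

fn main() {
    let mut good = Parser { has_block: true };
    assert!(parse_nt_block(&mut good).is_ok());

    let mut bad = Parser { has_block: false };
    match parse_nt_block(&mut bad) {
        Ok(_) => unreachable!(),
        // The caller owns the error now; here it could cancel it and try
        // another macro arm instead of aborting.
        Err(d) => println!("recoverable: {}", d.msg),
    }
}
```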
43 changes: 35 additions & 8 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -10,6 +10,7 @@

use ast::{self, TokenTree};
use codemap::{Span, DUMMY_SP};
use errors::FatalError;
use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
use ext::base::{NormalTT, TTMacroExpander};
use ext::tt::macro_parser::{Success, Error, Failure};
@@ -158,7 +159,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
}

/// Given `lhses` and `rhses`, this is the new macro we create
fn generic_extension<'cx>(cx: &'cx ExtCtxt,
fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
name: ast::Ident,
imported_from: Option<ast::Ident>,
@@ -174,7 +175,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,

// Which arm's failure should we report? (the one furthest along)
let mut best_fail_spot = DUMMY_SP;
let mut best_fail_msg = "internal error: ran no matchers".to_string();
let mut best_fail_diag = None;

for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
let lhs_tt = match *lhs {
@@ -184,6 +185,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,

match TokenTree::parse(cx, lhs_tt, arg) {
Success(named_matches) => {
best_fail_diag.map(|mut d| cx.parse_sess.span_diagnostic.cancel(&mut d));

let rhs = match rhses[i] {
// ignore delimiters
TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
@@ -214,17 +217,34 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
macro_ident: name
})
}
Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
Failure(diag) => {
let sp = diag.span.primary_span();
let mut new_diag = Some(diag);
if let Some(sp) = sp {
if sp.lo >= best_fail_spot.lo {
best_fail_spot = sp;
::std::mem::swap(&mut best_fail_diag, &mut new_diag);
}
}
// remove the previous diag if we swapped, or the new one if we didn't.
new_diag.map(|mut diag| cx.parse_sess.span_diagnostic.cancel(&mut diag));
}
Error(err_sp, ref msg) => {
cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
}
}
}

cx.span_fatal(best_fail_spot.substitute_dummy(sp), &best_fail_msg[..]);
match best_fail_diag {
None => cx.span_bug(sp, "internal error: ran no matchers"),
Some(mut diag) => {
for span in diag.span.primary_spans_mut() {
*span = span.substitute_dummy(sp);
}
diag.emit();
panic!(FatalError);
}
}
}
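With `Failure` now carrying a `DiagnosticBuilder`, `generic_extension` keeps the builder from the arm that failed furthest into the input, cancels every builder that loses the comparison (and the saved one if a later arm succeeds), and only emits the survivor once all arms have failed. A simplified, self-contained model of that selection — the types below are stand-ins, not the real `ExtCtxt`/`DiagnosticBuilder`:

```rust
// Keep the diagnostic from the arm whose failure is furthest into the input,
// cancel every other builder, and emit the survivor only if no arm matched.

#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct BytePos(u32);

struct Diag { lo: BytePos, msg: String, cancelled: bool }

impl Diag {
    // In the real code this also rebalances the handler's error count.
    fn cancel(&mut self) { self.cancelled = true; }
    fn emit(&self) {
        if !self.cancelled {
            println!("error: {}", self.msg);
        }
    }
}

enum ParseResult { Success, Failure(Diag) }

fn report_best_failure(arms: Vec<ParseResult>) {
    let mut best_fail_spot = BytePos(0);
    let mut best_fail_diag: Option<Diag> = None;

    for arm in arms {
        match arm {
            ParseResult::Success => {
                // A later arm matched: the saved failure is no longer needed.
                if let Some(d) = best_fail_diag.as_mut() { d.cancel(); }
                println!("matched");
                return;
            }
            ParseResult::Failure(diag) => {
                let lo = diag.lo;
                let mut new_diag = Some(diag);
                if lo >= best_fail_spot {
                    best_fail_spot = lo;
                    std::mem::swap(&mut best_fail_diag, &mut new_diag);
                }
                // Whichever builder lost the comparison gets cancelled.
                if let Some(d) = new_diag.as_mut() { d.cancel(); }
            }
        }
    }

    match best_fail_diag {
        // Only the furthest failure is ever reported.
        Some(d) => d.emit(),
        None => panic!("internal error: ran no matchers"),
    }
}

fn main() {
    // No arm matches: the failure furthest into the input wins.
    report_best_failure(vec![
        ParseResult::Failure(Diag { lo: BytePos(4), msg: "no rules expected the token `foo`".into(), cancelled: false }),
        ParseResult::Failure(Diag { lo: BytePos(9), msg: "unexpected end of macro invocation".into(), cancelled: false }),
    ]);

    // If some arm succeeds, any saved failure is cancelled instead of emitted.
    report_best_failure(vec![
        ParseResult::Failure(Diag { lo: BytePos(2), msg: "no rules expected the token `bar`".into(), cancelled: false }),
        ParseResult::Success,
    ]);
}
```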

// Note that macro-by-example's input is also matched against a token tree:
Expand Down Expand Up @@ -279,7 +299,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
arg_reader,
&argument_gram) {
Success(m) => m,
Failure(sp, str) | Error(sp, str) => {
Failure(mut diag) => {
for span in diag.span.primary_spans_mut() {
*span = span.substitute_dummy(def.span);
}
diag.emit();
panic!(FatalError);
}
Error(sp, str) => {
panic!(cx.parse_sess().span_diagnostic
.span_fatal(sp.substitute_dummy(def.span), &str[..]));
}
@@ -56,8 +56,12 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
_ => unreachable!()
}
}
Failure(_, s) | Error(_, s) => {
panic!("expected Success, but got Error/Failure: {}", s);
Failure(diag) => {
diag.emit();
panic!("expected Success, but got Failure");
}
Error(_, s) => {
panic!("expected Success, but got Error: {}", s);
}
};
