diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs
index f8bce297a42ad..7b048c0670d5a 100644
--- a/src/librustc_plugin/registry.rs
+++ b/src/librustc_plugin/registry.rs
@@ -15,8 +15,7 @@
 use rustc::session::Session;
 use rustc::mir::transform::MirMapPass;
 
-use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT};
-use syntax::ext::base::{IdentTT, MultiModifier, MultiDecorator};
+use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
 use syntax::ext::base::MacroExpanderFn;
 use syntax::parse::token;
 use syntax::ast;
@@ -109,10 +108,7 @@ impl<'a> Registry<'a> {
             IdentTT(ext, _, allow_internal_unstable) => {
                 IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
             }
-            MultiDecorator(ext) => MultiDecorator(ext),
-            MultiModifier(ext) => MultiModifier(ext),
-            SyntaxExtension::ProcMacro(ext) => SyntaxExtension::ProcMacro(ext),
-            SyntaxExtension::AttrProcMacro(ext) => SyntaxExtension::AttrProcMacro(ext),
+            _ => extension,
         }));
     }
 
diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs
index 67ee4c307d3c3..67e725b6e727d 100644
--- a/src/librustc_resolve/macros.rs
+++ b/src/librustc_resolve/macros.rs
@@ -73,7 +73,9 @@ impl<'a> base::Resolver for Resolver<'a> {
             let name = intern(&attrs[i].name());
             match self.expansion_data[0].module.macros.borrow().get(&name) {
                 Some(ext) => match **ext {
-                    MultiModifier(..) | MultiDecorator(..) => return Some(attrs.remove(i)),
+                    MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
+                        return Some(attrs.remove(i))
+                    }
                     _ => {}
                 },
                 None => {}
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 82db9ffca83a4..917426807110e 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT, MacroRulesTT};
+pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT};
 
 use ast::{self, Attribute, Name, PatKind};
 use attr::HasAttrs;
@@ -18,6 +18,7 @@ use errors::DiagnosticBuilder;
 use ext::expand::{self, Invocation, Expansion};
 use ext::hygiene::Mark;
 use ext::tt::macro_rules;
+use fold;
 use parse;
 use parse::parser::{self, Parser};
 use parse::token;
@@ -61,14 +62,6 @@ impl HasAttrs for Annotatable {
 }
 
 impl Annotatable {
-    pub fn span(&self) -> Span {
-        match *self {
-            Annotatable::Item(ref item) => item.span,
-            Annotatable::TraitItem(ref trait_item) => trait_item.span,
-            Annotatable::ImplItem(ref impl_item) => impl_item.span,
-        }
-    }
-
    pub fn expect_item(self) -> P<ast::Item> {
        match self {
            Annotatable::Item(i) => i,
@@ -160,21 +153,19 @@ pub trait ProcMacro {
                   ecx: &'cx mut ExtCtxt,
                   span: Span,
                   ts: TokenStream)
-                  -> Box<MacResult+'cx>;
+                  -> TokenStream;
 }
 
 impl<F> ProcMacro for F
     where F: Fn(TokenStream) -> TokenStream
 {
     fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
+                   _ecx: &'cx mut ExtCtxt,
+                   _span: Span,
                    ts: TokenStream)
-                   -> Box<MacResult+'cx> {
-        let result = (*self)(ts);
+                   -> TokenStream {
         // FIXME setup implicit context in TLS before calling self.
-        let parser = ecx.new_parser_from_tts(&result.to_tts());
-        Box::new(TokResult { parser: parser, span: span })
+        (*self)(ts)
     }
 }
 
@@ -184,50 +175,63 @@ pub trait AttrProcMacro {
                   ecx: &'cx mut ExtCtxt,
                   span: Span,
                   annotation: TokenStream,
                   annotated: TokenStream)
-                  -> Box<MacResult+'cx>;
+                  -> TokenStream;
 }
 
 impl<F> AttrProcMacro for F
     where F: Fn(TokenStream, TokenStream) -> TokenStream
 {
     fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
+                   _ecx: &'cx mut ExtCtxt,
+                   _span: Span,
                    annotation: TokenStream,
                    annotated: TokenStream)
-                   -> Box<MacResult+'cx> {
+                   -> TokenStream {
         // FIXME setup implicit context in TLS before calling self.
-        let parser = ecx.new_parser_from_tts(&(*self)(annotation, annotated).to_tts());
-        Box::new(TokResult { parser: parser, span: span })
+        (*self)(annotation, annotated)
     }
 }
 
-struct TokResult<'a> {
-    parser: Parser<'a>,
-    span: Span,
+pub struct TokResult<'a> {
+    pub parser: Parser<'a>,
+    pub span: Span,
+}
+
+impl<'a> TokResult<'a> {
+    // There is quite a lot of overlap here with ParserAnyMacro in ext/tt/macro_rules.rs
+    // We could probably share more code.
+    // FIXME(#36641) Unify TokResult and ParserAnyMacro.
+    fn ensure_complete_parse(&mut self, allow_semi: bool) {
+        let macro_span = &self.span;
+        self.parser.ensure_complete_parse(allow_semi, |parser| {
+            let token_str = parser.this_token_to_string();
+            let msg = format!("macro expansion ignores token `{}` and any following", token_str);
+            let span = parser.span;
+            parser.diagnostic()
+                  .struct_span_err(span, &msg)
+                  .span_note(*macro_span, "caused by the macro expansion here")
+                  .emit();
+        });
+    }
 }
 
 impl<'a> MacResult for TokResult<'a> {
     fn make_items(mut self: Box<Self>) -> Option<SmallVector<P<ast::Item>>> {
         if self.parser.sess.span_diagnostic.has_errors() {
-            return None;
+            return Some(SmallVector::zero());
         }
         let mut items = SmallVector::zero();
         loop {
             match self.parser.parse_item() {
-                Ok(Some(item)) => {
-                    // FIXME better span info.
-                    let mut item = item.unwrap();
-                    item.span = self.span;
-                    items.push(P(item));
-                }
+                Ok(Some(item)) => items.push(item),
                 Ok(None) => {
+                    self.ensure_complete_parse(false);
                     return Some(items);
                 }
                 Err(mut e) => {
                     e.emit();
-                    return None;
+                    return Some(SmallVector::zero());
                 }
             }
         }
@@ -236,57 +240,61 @@ impl<'a> MacResult for TokResult<'a> {
     fn make_impl_items(mut self: Box<Self>) -> Option<SmallVector<ast::ImplItem>> {
         let mut items = SmallVector::zero();
         loop {
+            if self.parser.token == token::Eof {
+                break;
+            }
             match self.parser.parse_impl_item() {
-                Ok(mut item) => {
-                    // FIXME better span info.
-                    item.span = self.span;
-                    items.push(item);
-
-                    return Some(items);
-                }
+                Ok(item) => items.push(item),
                 Err(mut e) => {
                     e.emit();
-                    return None;
+                    return Some(SmallVector::zero());
                 }
             }
         }
+        self.ensure_complete_parse(false);
+        Some(items)
     }
 
     fn make_trait_items(mut self: Box<Self>) -> Option<SmallVector<ast::TraitItem>> {
         let mut items = SmallVector::zero();
         loop {
+            if self.parser.token == token::Eof {
+                break;
+            }
             match self.parser.parse_trait_item() {
-                Ok(mut item) => {
-                    // FIXME better span info.
-                    item.span = self.span;
-                    items.push(item);
-
-                    return Some(items);
-                }
+                Ok(item) => items.push(item),
                 Err(mut e) => {
                     e.emit();
-                    return None;
+                    return Some(SmallVector::zero());
                 }
             }
         }
+        self.ensure_complete_parse(false);
+        Some(items)
     }
 
     fn make_expr(mut self: Box<Self>) -> Option<P<ast::Expr>> {
         match self.parser.parse_expr() {
-            Ok(e) => Some(e),
+            Ok(e) => {
+                self.ensure_complete_parse(true);
+                Some(e)
+            }
             Err(mut e) => {
                 e.emit();
-                return None;
+                Some(DummyResult::raw_expr(self.span))
             }
         }
     }
 
     fn make_pat(mut self: Box<Self>) -> Option<P<ast::Pat>> {
         match self.parser.parse_pat() {
-            Ok(e) => Some(e),
+            Ok(e) => {
+                self.ensure_complete_parse(false);
+                Some(e)
+            }
             Err(mut e) => {
                 e.emit();
-                return None;
+                Some(P(DummyResult::raw_pat(self.span)))
             }
         }
     }
@@ -295,28 +303,30 @@ impl<'a> MacResult for TokResult<'a> {
         let mut stmts = SmallVector::zero();
         loop {
             if self.parser.token == token::Eof {
-                return Some(stmts);
+                break;
             }
-            match self.parser.parse_full_stmt(true) {
-                Ok(Some(mut stmt)) => {
-                    stmt.span = self.span;
-                    stmts.push(stmt);
-                }
+            match self.parser.parse_full_stmt(false) {
+                Ok(Some(stmt)) => stmts.push(stmt),
                 Ok(None) => { /* continue */ }
                 Err(mut e) => {
                     e.emit();
-                    return None;
+                    return Some(SmallVector::zero());
                 }
             }
         }
+        self.ensure_complete_parse(false);
+        Some(stmts)
     }
 
     fn make_ty(mut self: Box<Self>) -> Option<P<ast::Ty>> {
         match self.parser.parse_ty() {
-            Ok(e) => Some(e),
+            Ok(e) => {
+                self.ensure_complete_parse(false);
+                Some(e)
+            }
             Err(mut e) => {
                 e.emit();
-                return None;
+                Some(DummyResult::raw_ty(self.span))
             }
         }
     }
@@ -1004,3 +1014,17 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
     }
     Some(es)
 }
+
+pub struct ChangeSpan {
+    pub span: Span
+}
+
+impl Folder for ChangeSpan {
+    fn new_span(&mut self, _sp: Span) -> Span {
+        self.span
+    }
+
+    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+        fold::noop_fold_mac(mac, self)
+    }
+}
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index f022dd3a08b5e..eaa7684d8fb3b 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -21,8 +21,10 @@ use ext::base::*;
 use feature_gate::{self, Features};
 use fold;
 use fold::*;
+use parse::{ParseSess, lexer};
+use parse::parser::Parser;
 use parse::token::{intern, keywords};
-use parse::span_to_tts;
+use print::pprust;
 use ptr::P;
 use tokenstream::{TokenTree, TokenStream};
 use util::small_vector::SmallVector;
@@ -310,29 +312,18 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = TokenStream::from_tts(span_to_tts(&fld.cx.parse_sess,
-                                                                  attr.span));
-                let item_toks = TokenStream::from_tts(span_to_tts(&fld.cx.parse_sess,
-                                                                  item.span()));
-                let result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
-                let items = match item {
-                    Annotatable::Item(_) => result.make_items()
-                                                  .unwrap_or(SmallVector::zero())
-                                                  .into_iter()
-                                                  .map(|i| Annotatable::Item(i))
-                                                  .collect(),
-                    Annotatable::TraitItem(_) => result.make_trait_items()
-                                                       .unwrap_or(SmallVector::zero())
-                                                       .into_iter()
-                                                       .map(|i| Annotatable::TraitItem(P(i)))
-                                                       .collect(),
-                    Annotatable::ImplItem(_) => result.make_impl_items()
-                                                      .unwrap_or(SmallVector::zero())
-                                                      .into_iter()
-                                                      .map(|i| Annotatable::ImplItem(P(i)))
-                                                      .collect(),
-                };
-                kind.expect_from_annotatables(items)
+                let attr_toks = TokenStream::from_tts(tts_for_attr(&attr, &self.cx.parse_sess));
+                let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
+
+                let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
+                let parser = self.cx.new_parser_from_tts(&tok_result.to_tts());
+                let result = Box::new(TokResult { parser: parser, span: attr.span });
+
+                kind.make_from(result).unwrap_or_else(|| {
+                    let msg = format!("macro could not be expanded into {} position", kind.name());
+                    self.cx.span_err(attr.span, &msg);
+                    kind.dummy(attr.span)
+                })
             }
             _ => unreachable!(),
         }
@@ -413,12 +404,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 if ident.name != keywords::Invalid.name() {
                     let msg =
                         format!("macro {}! expects no ident argument, given '{}'", extname, ident);
-                    fld.cx.span_err(path.span, &msg);
-                    return None;
+                    self.cx.span_err(path.span, &msg);
+                    return kind.dummy(span);
                 }
 
-                fld.cx.bt_push(ExpnInfo {
-                    call_site: call_site,
+                self.cx.bt_push(ExpnInfo {
+                    call_site: span,
                     callee: NameAndSpan {
                         format: MacroBang(extname),
                         // FIXME procedural macros do not have proper span info
@@ -429,7 +420,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });
 
-                Some(expandfun.expand(fld.cx, call_site, TokenStream::from_tts(marked_tts)))
+
+                let tok_result = expandfun.expand(self.cx,
+                                                  span,
+                                                  TokenStream::from_tts(marked_tts));
+                let parser = self.cx.new_parser_from_tts(&tok_result.to_tts());
+                let result = Box::new(TokResult { parser: parser, span: span });
+                // FIXME better span info.
+                kind.make_from(result).map(|i| i.fold_with(&mut ChangeSpan { span: span }))
             }
         };
 
@@ -502,6 +500,36 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
     }
 }
 
+// These are pretty nasty. Ideally, we would keep the tokens around, linked from
+// the AST. However, we don't so we need to create new ones. Since the item might
+// have come from a macro expansion (possibly only in part), we can't use the
+// existing codemap.
+//
+// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
+// string, which we then re-tokenise (double yuck), but first we have to patch
+// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
+fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    let text = match *item {
+        Annotatable::Item(ref i) => pprust::item_to_string(i),
+        Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
+        Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
+    };
+    string_to_tts(text, parse_sess)
+}
+
+fn tts_for_attr(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    string_to_tts(pprust::attr_to_string(attr), parse_sess)
+}
+
+fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    let filemap = parse_sess.codemap()
+                            .new_filemap(String::from("<macro expansion>"), None, text);
+
+    let lexer = lexer::StringReader::new(&parse_sess.span_diagnostic, filemap);
+    let mut parser = Parser::new(parse_sess, Vec::new(), Box::new(lexer));
+    panictry!(parser.parse_all_token_trees())
+}
+
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
     fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
         let mut expr = self.cfg.configure_expr(expr).unwrap();
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index da82c9ffab1cb..3746a51d359bd 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -49,22 +49,19 @@ impl<'a> ParserAnyMacro<'a> {
     /// allowed to be there.
     fn ensure_complete_parse(&self, allow_semi: bool, context: &str) {
         let mut parser = self.parser.borrow_mut();
-        if allow_semi && parser.token == token::Semi {
-            parser.bump();
-        }
-        if parser.token != token::Eof {
+        parser.ensure_complete_parse(allow_semi, |parser| {
            let token_str = parser.this_token_to_string();
            let msg = format!("macro expansion ignores token `{}` and any \
                               following",
                              token_str);
            let span = parser.span;
-           let mut err = parser.diagnostic().struct_span_err(span, &msg[..]);
+           let mut err = parser.diagnostic().struct_span_err(span, &msg);
            let msg = format!("caused by the macro expansion here; the usage \
                               of `{}!` is likely invalid in {} context",
                              self.macro_ident,
                              context);
-           err.span_note(self.site_span, &msg[..])
+           err.span_note(self.site_span, &msg)
               .emit();
-        }
+        });
     }
 }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 53294e78710bc..6c0e2425d37ad 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -110,7 +110,6 @@ impl<'a> Reader for StringReader<'a> {
             Some(t) => self.pos > t,
             None => false,
         }
-
     }
     /// Return the next token. EFFECT: advances the string_reader.
     fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
@@ -222,28 +221,6 @@ impl<'a> StringReader<'a> {
         sr
     }
 
-    pub fn from_span<'b>(span_diagnostic: &'b Handler,
-                         span: Span,
-                         codemap: &CodeMap)
-                         -> StringReader<'b> {
-        let start_pos = codemap.lookup_byte_offset(span.lo);
-        let last_pos = codemap.lookup_byte_offset(span.hi);
-        assert!(start_pos.fm.name == last_pos.fm.name, "Attempt to lex span which crosses files");
-        let mut sr = StringReader::new_raw_internal(span_diagnostic, start_pos.fm.clone());
-        sr.pos = span.lo;
-        sr.last_pos = span.lo;
-        sr.terminator = Some(span.hi);
-        sr.save_new_lines = false;
-
-        sr.bump();
-
-        if let Err(_) = sr.advance_token() {
-            sr.emit_fatal_errors();
-            panic!(FatalError);
-        }
-        sr
-    }
-
     pub fn curr_is(&self, c: char) -> bool {
         self.curr == Some(c)
     }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 4ad8e227cbb54..5aa0efdec11a2 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -258,13 +258,6 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-pub fn span_to_tts(sess: &ParseSess, span: Span) -> Vec<TokenTree> {
-    let cfg = Vec::new();
-    let srdr = lexer::StringReader::from_span(&sess.span_diagnostic, span, &sess.code_map);
-    let mut p1 = Parser::new(sess, cfg, Box::new(srdr));
-    panictry!(p1.parse_all_token_trees())
-}
-
 /// Given a filemap, produce a sequence of token-trees
 pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
     -> Vec<tokenstream::TokenTree> {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 5cd4a04657716..23085fadc5e60 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -3872,15 +3872,17 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_stmt_(&mut self, macro_expanded: bool) -> Option<Stmt> {
-        self.parse_stmt_without_recovery(macro_expanded).unwrap_or_else(|mut e| {
+    fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
+        self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
             e.emit();
             self.recover_stmt_(SemiColonMode::Break);
             None
         })
     }
 
-    fn parse_stmt_without_recovery(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
+    fn parse_stmt_without_recovery(&mut self,
+                                   macro_legacy_warnings: bool)
+                                   -> PResult<'a, Option<Stmt>> {
         maybe_whole!(Some deref self, NtStmt);
 
         let attrs = self.parse_outer_attributes()?;
@@ -3950,7 +3952,7 @@ impl<'a> Parser<'a> {
         // We used to incorrectly stop parsing macro-expanded statements here.
         // If the next token will be an error anyway but could have parsed with the
         // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-        else if macro_expanded && self.token.can_begin_expr() && match self.token {
+        else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
             // These can continue an expression, so we can't stop parsing and warn.
             token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
             token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -4125,8 +4127,8 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a statement, including the trailing semicolon.
-    pub fn parse_full_stmt(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
-        let mut stmt = match self.parse_stmt_(macro_expanded) {
+    pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+        let mut stmt = match self.parse_stmt_(macro_legacy_warnings) {
             Some(stmt) => stmt,
             None => return Ok(None),
         };
@@ -4146,7 +4148,7 @@ impl<'a> Parser<'a> {
             }
             StmtKind::Local(..) => {
                 // We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
-                if macro_expanded && self.token != token::Semi {
+                if macro_legacy_warnings && self.token != token::Semi {
                     self.warn_missing_semicolon();
                 } else {
                     self.expect_one_of(&[token::Semi], &[])?;
@@ -6169,4 +6171,15 @@ impl<'a> Parser<'a> {
             _ => Err(self.fatal("expected string literal"))
         }
     }
+
+    pub fn ensure_complete_parse<F>(&mut self, allow_semi: bool, on_err: F)
+        where F: FnOnce(&Parser)
+    {
+        if allow_semi && self.token == token::Semi {
+            self.bump();
+        }
+        if self.token != token::Eof {
+            on_err(self);
+        }
+    }
 }
diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs
index 465fc0016e5e8..624fabd1424e1 100644
--- a/src/libsyntax_ext/deriving/custom.rs
+++ b/src/libsyntax_ext/deriving/custom.rs
@@ -15,7 +15,7 @@
 use rustc_macro::{TokenStream, __internal};
 use syntax::ast::{self, ItemKind};
 use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan, Span};
 use syntax::ext::base::*;
-use syntax::fold::{self, Folder};
+use syntax::fold::Folder;
 use syntax::parse::token::intern;
 use syntax::print::pprust;
@@ -97,14 +97,3 @@ impl MultiItemModifier for CustomDerive {
     }
 }
 
-struct ChangeSpan { span: Span }
-
-impl Folder for ChangeSpan {
-    fn new_span(&mut self, _sp: Span) -> Span {
-        self.span
-    }
-
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-        fold::noop_fold_mac(mac, self)
-    }
-}
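The patch above makes procedural macro expanders plain `TokenStream -> TokenStream` functions and moves all result handling (re-parsing the returned tokens, span fixup via `ChangeSpan`, and the "macro expansion ignores token" check) into the expander through `TokResult` and the new shared `Parser::ensure_complete_parse` helper. As a minimal sketch of that helper's contract only (not rustc code; `Tok` and `ToyParser` are invented stand-ins), the idea is: optionally eat one trailing semicolon, then hand any leftover input to an error callback instead of silently dropping it.

```rust
// Illustrative sketch only: `Tok` and `ToyParser` are invented here and are not
// libsyntax types; they just mirror the shape of the helper added to `Parser`.
#[derive(Debug, PartialEq)]
enum Tok {
    Ident(&'static str),
    Semi,
}

struct ToyParser {
    tokens: Vec<Tok>,
    pos: usize,
}

impl ToyParser {
    fn new(tokens: Vec<Tok>) -> ToyParser {
        ToyParser { tokens: tokens, pos: 0 }
    }

    fn current(&self) -> Option<&Tok> {
        self.tokens.get(self.pos)
    }

    fn bump(&mut self) {
        self.pos += 1;
    }

    // Optionally eat one trailing `;`, then report anything left before EOF
    // through `on_err` instead of silently discarding it.
    fn ensure_complete_parse<F>(&mut self, allow_semi: bool, on_err: F)
        where F: FnOnce(&ToyParser)
    {
        if allow_semi && self.current() == Some(&Tok::Semi) {
            self.bump();
        }
        if self.current().is_some() {
            on_err(self);
        }
    }
}

fn main() {
    // Pretend a macro expanded to `foo ; bar` and the caller only parsed `foo`.
    let mut p = ToyParser::new(vec![Tok::Ident("foo"), Tok::Semi, Tok::Ident("bar")]);
    p.bump(); // `foo` consumed
    p.ensure_complete_parse(true, |parser| {
        println!("macro expansion ignores token `{:?}` and any following",
                 parser.current().unwrap());
    });
}
```

In the compiler itself the callback builds the "macro expansion ignores token ... and any following" diagnostic and attaches a note pointing back at the macro use site, which is how both `TokResult::ensure_complete_parse` and `ParserAnyMacro::ensure_complete_parse` now report trailing tokens through the shared `Parser` method.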