From 2d9831dea598d8a45c69e8c799503e8a397aacc0 Mon Sep 17 00:00:00 2001 From: Geoffry Song Date: Thu, 5 Mar 2015 15:06:49 -0500 Subject: [PATCH 1/3] Interpolate AST nodes in quasiquote. This replaces the `ToTokens` implementations for expressions, statements, etc. with almost-trivial ones that produce `Interpolated(*Nt(...))` pseudo-tokens. In this way, quasiquote now works the same way as macros do: already-parsed AST fragments are used as-is, not reparsed. The `ToSource` trait is removed. Quasiquote no longer involves pretty-printing at all, which removes the need for the `encode_with_hygiene` hack. All associated machinery is removed. New `Nonterminal`s are added — NtArm, NtImplItem, and NtTraitItem — which the parser now interpolates. These are just for quasiquote, not macros (although they could be in the future). `ToTokens` is no longer implemented for `Arg` (although this could be added again) and `Generics` (which I don't think makes sense). This breaks any compiler extensions that relied on the ability of `ToTokens` to turn AST fragments back into inspectable token trees. For this reason, this closes #16987. As such, this is a [breaking-change]. Fixes #16472. Fixes #15962. Fixes #17397. Fixes #16617. 
--- src/libsyntax/ast.rs | 6 - src/libsyntax/ext/quote.rs | 363 ++++++++++----------------- src/libsyntax/fold.rs | 7 + src/libsyntax/parse/lexer/mod.rs | 107 -------- src/libsyntax/parse/mod.rs | 72 ------ src/libsyntax/parse/parser.rs | 7 +- src/libsyntax/parse/token.rs | 7 + src/libsyntax/print/pprust.rs | 122 +++------ src/test/run-pass-fulldeps/qquote.rs | 79 +++--- 9 files changed, 227 insertions(+), 543 deletions(-) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 94dc36b16ba0f..07fb6cbe5c6af 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -89,12 +89,6 @@ impl Ident { pub fn as_str<'a>(&'a self) -> &'a str { self.name.as_str() } - - pub fn encode_with_hygiene(&self) -> String { - format!("\x00name_{},ctxt_{}\x00", - self.name.usize(), - self.ctxt) - } } impl fmt::Debug for Ident { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 5776fa9974076..e100b7705d817 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -30,16 +30,16 @@ pub mod rt { use ext::base::ExtCtxt; use parse::token; use parse; - use print::pprust; use ptr::P; + use std::rc::Rc; - use ast::{TokenTree, Generics, Expr}; + use ast::{TokenTree, Expr}; pub use parse::new_parser_from_tts; - pub use codemap::{BytePos, Span, dummy_spanned}; + pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP}; pub trait ToTokens { - fn to_tokens(&self, _cx: &ExtCtxt) -> Vec ; + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec; } impl ToTokens for TokenTree { @@ -70,277 +70,189 @@ pub mod rt { } } - /* Should be (when bugs in default methods are fixed): - - trait ToSource : ToTokens { - // Takes a thing and generates a string containing rust code for it. - pub fn to_source() -> String; - - // If you can make source, you can definitely make tokens. 
- pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] { - cx.parse_tts(self.to_source()) + impl ToTokens for ast::Ident { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))] } } - */ - - // FIXME: Move this trait to pprust and get rid of *_to_str? - pub trait ToSource { - // Takes a thing and generates a string containing rust code for it. - fn to_source(&self) -> String; + impl ToTokens for ast::Path { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))] + } } - // FIXME (Issue #16472): This should go away after ToToken impls - // are revised to go directly to token-trees. - trait ToSourceWithHygiene : ToSource { - // Takes a thing and generates a string containing rust code - // for it, encoding Idents as special byte sequences to - // maintain hygiene across serialization and deserialization. - fn to_source_with_hygiene(&self) -> String; + impl ToTokens for ast::Ty { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] + } } - macro_rules! 
impl_to_source { - (P<$t:ty>, $pp:ident) => ( - impl ToSource for P<$t> { - fn to_source(&self) -> String { - pprust::$pp(&**self) - } - } - impl ToSourceWithHygiene for P<$t> { - fn to_source_with_hygiene(&self) -> String { - pprust::with_hygiene::$pp(&**self) - } - } - ); - ($t:ty, $pp:ident) => ( - impl ToSource for $t { - fn to_source(&self) -> String { - pprust::$pp(self) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - pprust::with_hygiene::$pp(self) - } - } - ); + impl ToTokens for ast::Block { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] + } } - fn slice_to_source<'a, T: ToSource>(sep: &'static str, xs: &'a [T]) -> String { - xs.iter() - .map(|i| i.to_source()) - .collect::>() - .connect(sep) - .to_string() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))] + } } - fn slice_to_source_with_hygiene<'a, T: ToSourceWithHygiene>( - sep: &'static str, xs: &'a [T]) -> String { - xs.iter() - .map(|i| i.to_source_with_hygiene()) - .collect::>() - .connect(sep) - .to_string() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))] + } } - macro_rules! 
impl_to_source_slice { - ($t:ty, $sep:expr) => ( - impl ToSource for [$t] { - fn to_source(&self) -> String { - slice_to_source($sep, self) - } - } - - impl ToSourceWithHygiene for [$t] { - fn to_source_with_hygiene(&self) -> String { - slice_to_source_with_hygiene($sep, self) - } - } - ) + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] + } } - impl ToSource for ast::Ident { - fn to_source(&self) -> String { - token::get_ident(*self).to_string() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone())))] } } - impl ToSourceWithHygiene for ast::Ident { - fn to_source_with_hygiene(&self) -> String { - self.encode_with_hygiene() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))] } } - impl_to_source! { ast::Path, path_to_string } - impl_to_source! { ast::Ty, ty_to_string } - impl_to_source! { ast::Block, block_to_string } - impl_to_source! { ast::Arg, arg_to_string } - impl_to_source! { Generics, generics_to_string } - impl_to_source! { ast::WhereClause, where_clause_to_string } - impl_to_source! { P, item_to_string } - impl_to_source! { P, impl_item_to_string } - impl_to_source! { P, trait_item_to_string } - impl_to_source! { P, stmt_to_string } - impl_to_source! { P, expr_to_string } - impl_to_source! { P, pat_to_string } - impl_to_source! { ast::Arm, arm_to_string } - impl_to_source_slice! { ast::Ty, ", " } - impl_to_source_slice! 
{ P, "\n\n" } - - impl ToSource for ast::Attribute_ { - fn to_source(&self) -> String { - pprust::attribute_to_string(&dummy_spanned(self.clone())) + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))] } } - impl ToSourceWithHygiene for ast::Attribute_ { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for ast::Arm { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] } } - impl ToSource for str { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitStr( - token::intern_and_get_ident(self), ast::CookedStr)); - pprust::lit_to_string(&lit) - } + macro_rules! impl_to_tokens_slice { + ($t: ty, $sep: expr) => { + impl ToTokens for [$t] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + let mut v = vec![]; + for (i, x) in self.iter().enumerate() { + if i > 0 { + v.push_all(&$sep); + } + v.extend(x.to_tokens(cx)); + } + v + } + } + }; } - impl ToSourceWithHygiene for str { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] } + impl_to_tokens_slice! 
{ P, [] } + + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] } } - impl ToSource for () { - fn to_source(&self) -> String { - "()".to_string() + impl ToTokens for ast::Attribute { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + let mut r = vec![]; + // FIXME: The spans could be better + r.push(ast::TtToken(self.span, token::Pound)); + if self.node.style == ast::AttrInner { + r.push(ast::TtToken(self.span, token::Not)); + } + r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited { + delim: token::Bracket, + open_span: self.span, + tts: self.node.value.to_tokens(cx), + close_span: self.span, + }))); + r } } - impl ToSourceWithHygiene for () { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for str { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + let lit = ast::LitStr( + token::intern_and_get_ident(self), ast::CookedStr); + dummy_spanned(lit).to_tokens(cx) } } - impl ToSource for bool { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitBool(*self)); - pprust::lit_to_string(&lit) + impl ToTokens for () { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited { + delim: token::Paren, + open_span: DUMMY_SP, + tts: vec![], + close_span: DUMMY_SP, + }))] } } - impl ToSourceWithHygiene for bool { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for ast::Lit { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + // FIXME: This is wrong + P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprLit(P(self.clone())), + span: DUMMY_SP, + }).to_tokens(cx) } } - impl ToSource for char { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitChar(*self)); - pprust::lit_to_string(&lit) + impl ToTokens for bool { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + dummy_spanned(ast::LitBool(*self)).to_tokens(cx) } } - impl ToSourceWithHygiene 
for char { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for char { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + dummy_spanned(ast::LitChar(*self)).to_tokens(cx) } } - macro_rules! impl_to_source_int { + macro_rules! impl_to_tokens_int { (signed, $t:ty, $tag:expr) => ( - impl ToSource for $t { - fn to_source(&self) -> String { + impl ToTokens for $t { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { let lit = ast::LitInt(*self as u64, ast::SignedIntLit($tag, ast::Sign::new(*self))); - pprust::lit_to_string(&dummy_spanned(lit)) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + dummy_spanned(lit).to_tokens(cx) } } ); (unsigned, $t:ty, $tag:expr) => ( - impl ToSource for $t { - fn to_source(&self) -> String { + impl ToTokens for $t { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { let lit = ast::LitInt(*self as u64, ast::UnsignedIntLit($tag)); - pprust::lit_to_string(&dummy_spanned(lit)) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + dummy_spanned(lit).to_tokens(cx) } } ); } - impl_to_source_int! { signed, isize, ast::TyIs } - impl_to_source_int! { signed, i8, ast::TyI8 } - impl_to_source_int! { signed, i16, ast::TyI16 } - impl_to_source_int! { signed, i32, ast::TyI32 } - impl_to_source_int! { signed, i64, ast::TyI64 } - - impl_to_source_int! { unsigned, usize, ast::TyUs } - impl_to_source_int! { unsigned, u8, ast::TyU8 } - impl_to_source_int! { unsigned, u16, ast::TyU16 } - impl_to_source_int! { unsigned, u32, ast::TyU32 } - impl_to_source_int! { unsigned, u64, ast::TyU64 } - - // Alas ... we write these out instead. All redundant. - - macro_rules! impl_to_tokens { - ($t:ty) => ( - impl ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec { - cx.parse_tts_with_hygiene(self.to_source_with_hygiene()) - } - } - ) - } + impl_to_tokens_int! { signed, isize, ast::TyIs } + impl_to_tokens_int! 
{ signed, i8, ast::TyI8 } + impl_to_tokens_int! { signed, i16, ast::TyI16 } + impl_to_tokens_int! { signed, i32, ast::TyI32 } + impl_to_tokens_int! { signed, i64, ast::TyI64 } - macro_rules! impl_to_tokens_lifetime { - ($t:ty) => ( - impl<'a> ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec { - cx.parse_tts_with_hygiene(self.to_source_with_hygiene()) - } - } - ) - } - - impl_to_tokens! { ast::Ident } - impl_to_tokens! { ast::Path } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { ast::Arm } - impl_to_tokens_lifetime! { &'a [P] } - impl_to_tokens! { ast::Ty } - impl_to_tokens_lifetime! { &'a [ast::Ty] } - impl_to_tokens! { Generics } - impl_to_tokens! { ast::WhereClause } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { ast::Block } - impl_to_tokens! { ast::Arg } - impl_to_tokens! { ast::Attribute_ } - impl_to_tokens_lifetime! { &'a str } - impl_to_tokens! { () } - impl_to_tokens! { char } - impl_to_tokens! { bool } - impl_to_tokens! { isize } - impl_to_tokens! { i8 } - impl_to_tokens! { i16 } - impl_to_tokens! { i32 } - impl_to_tokens! { i64 } - impl_to_tokens! { usize } - impl_to_tokens! { u8 } - impl_to_tokens! { u16 } - impl_to_tokens! { u32 } - impl_to_tokens! { u64 } + impl_to_tokens_int! { unsigned, usize, ast::TyUs } + impl_to_tokens_int! { unsigned, u8, ast::TyU8 } + impl_to_tokens_int! { unsigned, u16, ast::TyU16 } + impl_to_tokens_int! { unsigned, u32, ast::TyU32 } + impl_to_tokens_int! { unsigned, u64, ast::TyU64 } pub trait ExtParseUtils { fn parse_item(&self, s: String) -> P; @@ -349,12 +261,6 @@ pub mod rt { fn parse_tts(&self, s: String) -> Vec; } - trait ExtParseUtilsWithHygiene { - // FIXME (Issue #16472): This should go away after ToToken impls - // are revised to go directly to token-trees. 
- fn parse_tts_with_hygiene(&self, s: String) -> Vec; - } - impl<'a> ExtParseUtils for ExtCtxt<'a> { fn parse_item(&self, s: String) -> P { @@ -386,19 +292,6 @@ pub mod rt { self.parse_sess()) } } - - impl<'a> ExtParseUtilsWithHygiene for ExtCtxt<'a> { - - fn parse_tts_with_hygiene(&self, s: String) -> Vec { - use parse::with_hygiene::parse_tts_from_source_str; - parse_tts_from_source_str("".to_string(), - s, - self.cfg(), - self.parse_sess()) - } - - } - } pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index c857d4403cbf8..5352a191b095f 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -682,6 +682,13 @@ pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))), token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))), + token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), + token::NtImplItem(arm) => + token::NtImplItem(fld.fold_impl_item(arm) + .expect_one("expected fold to produce exactly one item")), + token::NtTraitItem(arm) => + token::NtTraitItem(fld.fold_trait_item(arm) + .expect_one("expected fold to produce exactly one item")), } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d13ab65d72b2b..6b0674c9a41b4 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -19,7 +19,6 @@ use str::char_at; use std::borrow::Cow; use std::char; -use std::fmt; use std::mem::replace; use std::rc::Rc; @@ -71,11 +70,6 @@ pub struct StringReader<'a> { pub peek_tok: token::Token, pub peek_span: Span, - // FIXME (Issue #16472): This field should go away after ToToken impls - // are revised to go directly to token-trees. - /// Is \x00,\x00 is interpreted as encoded ast::Ident? 
- read_embedded_ident: bool, - // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. source_text: Rc @@ -130,17 +124,6 @@ impl<'a> Reader for TtReader<'a> { } } -// FIXME (Issue #16472): This function should go away after -// ToToken impls are revised to go directly to token-trees. -pub fn make_reader_with_embedded_idents<'b>(span_diagnostic: &'b SpanHandler, - filemap: Rc) - -> StringReader<'b> { - let mut sr = StringReader::new_raw(span_diagnostic, filemap); - sr.read_embedded_ident = true; - sr.advance_token(); - sr -} - impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into pos and curr pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler, @@ -162,7 +145,6 @@ impl<'a> StringReader<'a> { /* dummy values; not read */ peek_tok: token::Eof, peek_span: codemap::DUMMY_SP, - read_embedded_ident: false, source_text: source_text }; sr.bump(); @@ -578,81 +560,6 @@ impl<'a> StringReader<'a> { }) } - // FIXME (Issue #16472): The scan_embedded_hygienic_ident function - // should go away after we revise the syntax::ext::quote::ToToken - // impls to go directly to token-trees instead of thing -> string - // -> token-trees. (The function is currently used to resolve - // Issues #15750 and #15962.) - // - // Since this function is only used for certain internal macros, - // and the functionality it provides is not exposed to end user - // programs, pnkfelix deliberately chose to write it in a way that - // favors rustc debugging effectiveness over runtime efficiency. - - /// Scan through input of form \x00name_NNNNNN,ctxt_CCCCCCC\x00 - /// whence: `NNNNNN` is a string of characters forming an integer - /// (the name) and `CCCCCCC` is a string of characters forming an - /// integer (the ctxt), separate by a comma and delimited by a - /// `\x00` marker. 
- #[inline(never)] - fn scan_embedded_hygienic_ident(&mut self) -> ast::Ident { - fn bump_expecting_char<'a,D:fmt::Debug>(r: &mut StringReader<'a>, - c: char, - described_c: D, - whence: &str) { - match r.curr { - Some(r_c) if r_c == c => r.bump(), - Some(r_c) => panic!("expected {:?}, hit {:?}, {}", described_c, r_c, whence), - None => panic!("expected {:?}, hit EOF, {}", described_c, whence), - } - } - - let whence = "while scanning embedded hygienic ident"; - - // skip over the leading `\x00` - bump_expecting_char(self, '\x00', "nul-byte", whence); - - // skip over the "name_" - for c in "name_".chars() { - bump_expecting_char(self, c, c, whence); - } - - let start_bpos = self.last_pos; - let base = 10; - - // find the integer representing the name - self.scan_digits(base, base); - let encoded_name : u32 = self.with_str_from(start_bpos, |s| { - u32::from_str_radix(s, 10).unwrap_or_else(|_| { - panic!("expected digits representing a name, got {:?}, {}, range [{:?},{:?}]", - s, whence, start_bpos, self.last_pos); - }) - }); - - // skip over the `,` - bump_expecting_char(self, ',', "comma", whence); - - // skip over the "ctxt_" - for c in "ctxt_".chars() { - bump_expecting_char(self, c, c, whence); - } - - // find the integer representing the ctxt - let start_bpos = self.last_pos; - self.scan_digits(base, base); - let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| { - u32::from_str_radix(s, 10).unwrap_or_else(|_| { - panic!("expected digits representing a ctxt, got {:?}, {}", s, whence); - }) - }); - - // skip over the `\x00` - bump_expecting_char(self, '\x00', "nul-byte", whence); - - ast::Ident { name: ast::Name(encoded_name), - ctxt: encoded_ctxt, } - } - /// Scan through any digits (base `scan_radix`) or underscores, /// and return how many digits there were. 
/// @@ -1020,20 +927,6 @@ impl<'a> StringReader<'a> { return token::Literal(num, suffix) } - if self.read_embedded_ident { - match (c.unwrap(), self.nextch(), self.nextnextch()) { - ('\x00', Some('n'), Some('a')) => { - let ast_ident = self.scan_embedded_hygienic_ident(); - return if self.curr_is(':') && self.nextch_is(':') { - token::Ident(ast_ident, token::ModName) - } else { - token::Ident(ast_ident, token::Plain) - }; - } - _ => {} - } - } - match c.expect("next_token_inner called at EOF") { // One-byte tokens. ';' => { self.bump(); return token::Semi; } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index dee3e5fee74df..8c9ce5f78d482 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -166,9 +166,6 @@ pub fn parse_stmt_from_source_str(name: String, maybe_aborted(p.parse_stmt(), p) } -// Note: keep in sync with `with_hygiene::parse_tts_from_source_str` -// until #16472 is resolved. -// // Warning: This parses with quote_depth > 0, which is not the default. pub fn parse_tts_from_source_str(name: String, source: String, @@ -186,8 +183,6 @@ pub fn parse_tts_from_source_str(name: String, maybe_aborted(panictry!(p.parse_all_token_trees()),p) } -// Note: keep in sync with `with_hygiene::new_parser_from_source_str` -// until #16472 is resolved. // Create a new parser from a source string pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, @@ -220,8 +215,6 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, p } -// Note: keep this in sync with `with_hygiene::filemap_to_parser` until -// #16472 is resolved. 
/// Given a filemap and config, return a parser pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, @@ -277,8 +270,6 @@ pub fn string_to_filemap(sess: &ParseSess, source: String, path: String) sess.span_diagnostic.cm.new_filemap(path, source) } -// Note: keep this in sync with `with_hygiene::filemap_to_tts` (apart -// from the StringReader constructor), until #16472 is resolved. /// Given a filemap, produce a sequence of token-trees pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc) -> Vec { @@ -300,69 +291,6 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess, p } -// FIXME (Issue #16472): The `with_hygiene` mod should go away after -// ToToken impls are revised to go directly to token-trees. -pub mod with_hygiene { - use ast; - use codemap::FileMap; - use parse::parser::Parser; - use std::rc::Rc; - use super::ParseSess; - use super::{maybe_aborted, string_to_filemap, tts_to_parser}; - - // Note: keep this in sync with `super::parse_tts_from_source_str` until - // #16472 is resolved. - // - // Warning: This parses with quote_depth > 0, which is not the default. - pub fn parse_tts_from_source_str(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &ParseSess) -> Vec { - let mut p = new_parser_from_source_str( - sess, - cfg, - name, - source - ); - p.quote_depth += 1; - // right now this is re-creating the token trees from ... token trees. - maybe_aborted(panictry!(p.parse_all_token_trees()),p) - } - - // Note: keep this in sync with `super::new_parser_from_source_str` until - // #16472 is resolved. - // Create a new parser from a source string - fn new_parser_from_source_str<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - name: String, - source: String) -> Parser<'a> { - filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) - } - - // Note: keep this in sync with `super::filemap_to_parserr` until - // #16472 is resolved. 
- /// Given a filemap and config, return a parser - fn filemap_to_parser<'a>(sess: &'a ParseSess, - filemap: Rc, - cfg: ast::CrateConfig) -> Parser<'a> { - tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg) - } - - // Note: keep this in sync with `super::filemap_to_tts` until - // #16472 is resolved. - /// Given a filemap, produce a sequence of token-trees - fn filemap_to_tts(sess: &ParseSess, filemap: Rc) - -> Vec { - // it appears to me that the cfg doesn't matter here... indeed, - // parsing tt's probably shouldn't require a parser at all. - use super::lexer::make_reader_with_embedded_idents as make_reader; - let cfg = Vec::new(); - let srdr = make_reader(&sess.span_diagnostic, filemap); - let mut p1 = Parser::new(sess, cfg, Box::new(srdr)); - panictry!(p1.parse_all_token_trees()) - } -} - /// Abort if necessary pub fn maybe_aborted(result: T, p: Parser) -> T { p.abort_if_errors(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 47ea8d556fa0b..5f097256318c0 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1152,7 +1152,8 @@ impl<'a> Parser<'a> { &token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), seq_sep_none(), - |p| { + |p| -> PResult> { + maybe_whole!(no_clone p, NtTraitItem); let mut attrs = p.parse_outer_attributes(); let lo = p.span.lo; @@ -2943,6 +2944,8 @@ impl<'a> Parser<'a> { } pub fn parse_arm_nopanic(&mut self) -> PResult { + maybe_whole!(no_clone self, NtArm); + let attrs = self.parse_outer_attributes(); let pats = try!(self.parse_pats()); let mut guard = None; @@ -4335,6 +4338,8 @@ impl<'a> Parser<'a> { /// Parse an impl item. 
pub fn parse_impl_item(&mut self) -> PResult> { + maybe_whole!(no_clone self, NtImplItem); + let mut attrs = self.parse_outer_attributes(); let lo = self.span.lo; let vis = try!(self.parse_visibility()); diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index e33b1391a108e..0106de913bb87 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -381,6 +381,10 @@ pub enum Nonterminal { NtMeta(P), NtPath(Box), NtTT(P), // needs P'ed to break a circularity + // These is not exposed to macros, but is used by quasiquote. + NtArm(ast::Arm), + NtImplItem(P), + NtTraitItem(P), } impl fmt::Debug for Nonterminal { @@ -396,6 +400,9 @@ impl fmt::Debug for Nonterminal { NtMeta(..) => f.pad("NtMeta(..)"), NtPath(..) => f.pad("NtPath(..)"), NtTT(..) => f.pad("NtTT(..)"), + NtArm(..) => f.pad("NtArm(..)"), + NtImplItem(..) => f.pad("NtImplItem(..)"), + NtTraitItem(..) => f.pad("NtTraitItem(..)"), } } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 980ce720026c3..36364eb9bf3d5 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -66,7 +66,6 @@ pub struct State<'a> { cur_cmnt_and_lit: CurrentCommentAndLiteral, boxes: Vec, ann: &'a (PpAnn+'a), - encode_idents_with_hygiene: bool, } pub fn rust_printer<'a>(writer: Box) -> State<'a> { @@ -87,7 +86,6 @@ pub fn rust_printer_annotated<'a>(writer: Box, }, boxes: Vec::new(), ann: ann, - encode_idents_with_hygiene: false, } } @@ -179,7 +177,6 @@ impl<'a> State<'a> { }, boxes: Vec::new(), ann: ann, - encode_idents_with_hygiene: false, } } } @@ -290,103 +287,99 @@ pub fn token_to_string(tok: &Token) -> String { token::SpecialVarNt(var) => format!("${}", var.as_str()), token::Interpolated(ref nt) => match *nt { - token::NtExpr(ref e) => expr_to_string(&**e), - token::NtMeta(ref e) => meta_item_to_string(&**e), - token::NtTy(ref e) => ty_to_string(&**e), - token::NtPath(ref e) => path_to_string(&**e), - token::NtItem(..) 
=> "an interpolated item".to_string(), - token::NtBlock(..) => "an interpolated block".to_string(), - token::NtStmt(..) => "an interpolated statement".to_string(), - token::NtPat(..) => "an interpolated pattern".to_string(), - token::NtIdent(..) => "an interpolated identifier".to_string(), - token::NtTT(..) => "an interpolated tt".to_string(), + token::NtExpr(ref e) => expr_to_string(&**e), + token::NtMeta(ref e) => meta_item_to_string(&**e), + token::NtTy(ref e) => ty_to_string(&**e), + token::NtPath(ref e) => path_to_string(&**e), + token::NtItem(..) => "an interpolated item".to_string(), + token::NtBlock(..) => "an interpolated block".to_string(), + token::NtStmt(..) => "an interpolated statement".to_string(), + token::NtPat(..) => "an interpolated pattern".to_string(), + token::NtIdent(..) => "an interpolated identifier".to_string(), + token::NtTT(..) => "an interpolated tt".to_string(), + token::NtArm(..) => "an interpolated arm".to_string(), + token::NtImplItem(..) => "an interpolated impl item".to_string(), + token::NtTraitItem(..) => "an interpolated trait item".to_string(), } } } -// FIXME (Issue #16472): the thing_to_string_impls macro should go away -// after we revise the syntax::ext::quote::ToToken impls to go directly -// to token-trees instead of thing -> string -> token-trees. - -macro_rules! 
thing_to_string_impls { - ($to_string:ident) => { - pub fn ty_to_string(ty: &ast::Ty) -> String { - $to_string(|s| s.print_type(ty)) + to_string(|s| s.print_type(ty)) } pub fn bounds_to_string(bounds: &[ast::TyParamBound]) -> String { - $to_string(|s| s.print_bounds("", bounds)) + to_string(|s| s.print_bounds("", bounds)) } pub fn pat_to_string(pat: &ast::Pat) -> String { - $to_string(|s| s.print_pat(pat)) + to_string(|s| s.print_pat(pat)) } pub fn arm_to_string(arm: &ast::Arm) -> String { - $to_string(|s| s.print_arm(arm)) + to_string(|s| s.print_arm(arm)) } pub fn expr_to_string(e: &ast::Expr) -> String { - $to_string(|s| s.print_expr(e)) + to_string(|s| s.print_expr(e)) } pub fn lifetime_to_string(e: &ast::Lifetime) -> String { - $to_string(|s| s.print_lifetime(e)) + to_string(|s| s.print_lifetime(e)) } pub fn tt_to_string(tt: &ast::TokenTree) -> String { - $to_string(|s| s.print_tt(tt)) + to_string(|s| s.print_tt(tt)) } pub fn tts_to_string(tts: &[ast::TokenTree]) -> String { - $to_string(|s| s.print_tts(tts)) + to_string(|s| s.print_tts(tts)) } pub fn stmt_to_string(stmt: &ast::Stmt) -> String { - $to_string(|s| s.print_stmt(stmt)) + to_string(|s| s.print_stmt(stmt)) } pub fn attr_to_string(attr: &ast::Attribute) -> String { - $to_string(|s| s.print_attribute(attr)) + to_string(|s| s.print_attribute(attr)) } pub fn item_to_string(i: &ast::Item) -> String { - $to_string(|s| s.print_item(i)) + to_string(|s| s.print_item(i)) } pub fn impl_item_to_string(i: &ast::ImplItem) -> String { - $to_string(|s| s.print_impl_item(i)) + to_string(|s| s.print_impl_item(i)) } pub fn trait_item_to_string(i: &ast::TraitItem) -> String { - $to_string(|s| s.print_trait_item(i)) + to_string(|s| s.print_trait_item(i)) } pub fn generics_to_string(generics: &ast::Generics) -> String { - $to_string(|s| s.print_generics(generics)) + to_string(|s| s.print_generics(generics)) } pub fn where_clause_to_string(i: &ast::WhereClause) -> String { - $to_string(|s| s.print_where_clause(i)) + 
to_string(|s| s.print_where_clause(i)) } pub fn fn_block_to_string(p: &ast::FnDecl) -> String { - $to_string(|s| s.print_fn_block_args(p)) + to_string(|s| s.print_fn_block_args(p)) } pub fn path_to_string(p: &ast::Path) -> String { - $to_string(|s| s.print_path(p, false, 0)) + to_string(|s| s.print_path(p, false, 0)) } pub fn ident_to_string(id: &ast::Ident) -> String { - $to_string(|s| s.print_ident(*id)) + to_string(|s| s.print_ident(*id)) } pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ident, opt_explicit_self: Option<&ast::ExplicitSelf_>, generics: &ast::Generics) -> String { - $to_string(|s| { + to_string(|s| { try!(s.head("")); try!(s.print_fn(decl, unsafety, abi::Rust, Some(name), generics, opt_explicit_self, ast::Inherited)); @@ -396,7 +389,7 @@ pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ide } pub fn block_to_string(blk: &ast::Block) -> String { - $to_string(|s| { + to_string(|s| { // containing cbox, will be closed by print-block at } try!(s.cbox(indent_unit)); // head-ibox, will be closed by print-block after { @@ -406,59 +399,31 @@ pub fn block_to_string(blk: &ast::Block) -> String { } pub fn meta_item_to_string(mi: &ast::MetaItem) -> String { - $to_string(|s| s.print_meta_item(mi)) + to_string(|s| s.print_meta_item(mi)) } pub fn attribute_to_string(attr: &ast::Attribute) -> String { - $to_string(|s| s.print_attribute(attr)) + to_string(|s| s.print_attribute(attr)) } pub fn lit_to_string(l: &ast::Lit) -> String { - $to_string(|s| s.print_literal(l)) + to_string(|s| s.print_literal(l)) } pub fn explicit_self_to_string(explicit_self: &ast::ExplicitSelf_) -> String { - $to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) + to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) } pub fn variant_to_string(var: &ast::Variant) -> String { - $to_string(|s| s.print_variant(var)) + to_string(|s| s.print_variant(var)) } pub fn 
arg_to_string(arg: &ast::Arg) -> String { - $to_string(|s| s.print_arg(arg)) + to_string(|s| s.print_arg(arg)) } pub fn mac_to_string(arg: &ast::Mac) -> String { - $to_string(|s| s.print_mac(arg, ::parse::token::Paren)) -} - -} } - -thing_to_string_impls! { to_string } - -// FIXME (Issue #16472): the whole `with_hygiene` mod should go away -// after we revise the syntax::ext::quote::ToToken impls to go directly -// to token-trees instea of thing -> string -> token-trees. - -pub mod with_hygiene { - use abi; - use ast; - use std::io; - use super::indent_unit; - - // This function is the trick that all the rest of the routines - // hang on. - pub fn to_string_hyg(f: F) -> String where - F: FnOnce(&mut super::State) -> io::Result<()>, - { - super::to_string(move |s| { - s.encode_idents_with_hygiene = true; - f(s) - }) - } - - thing_to_string_impls! { to_string_hyg } + to_string(|s| s.print_mac(arg, ::parse::token::Paren)) } pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> String { @@ -2006,12 +1971,7 @@ impl<'a> State<'a> { } pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> { - if self.encode_idents_with_hygiene { - let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, &encoded[..])) - } else { - try!(word(&mut self.s, &token::get_ident(ident))) - } + try!(word(&mut self.s, &token::get_ident(ident))); self.ann.post(self, NodeIdent(&ident)) } diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 995dd80df65a0..ceface384847f 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -9,59 +9,56 @@ // except according to those terms. 
// ignore-cross-compile -// ignore-pretty #![feature(quote, rustc_private)] extern crate syntax; -use syntax::ast; -use syntax::codemap; -use syntax::parse; -use syntax::print::pprust; +use syntax::codemap::DUMMY_SP; +use syntax::print::pprust::*; -trait FakeExtCtxt { - fn call_site(&self) -> codemap::Span; - fn cfg(&self) -> ast::CrateConfig; - fn ident_of(&self, st: &str) -> ast::Ident; - fn name_of(&self, st: &str) -> ast::Name; - fn parse_sess(&self) -> &parse::ParseSess; -} - -impl FakeExtCtxt for parse::ParseSess { - fn call_site(&self) -> codemap::Span { - codemap::Span { - lo: codemap::BytePos(0), - hi: codemap::BytePos(0), - expn_id: codemap::NO_EXPANSION, +fn main() { + let ps = syntax::parse::new_parse_sess(); + let mut cx = syntax::ext::base::ExtCtxt::new( + &ps, vec![], + syntax::ext::expand::ExpansionConfig::default("qquote".to_string())); + cx.bt_push(syntax::codemap::ExpnInfo { + call_site: DUMMY_SP, + callee: syntax::codemap::NameAndSpan { + name: "".to_string(), + format: syntax::codemap::MacroBang, + allow_internal_unstable: false, + span: None, } + }); + let cx = &mut cx; + + macro_rules! 
check { + ($f: ident, $($e: expr),+; $expect: expr) => ({ + $(assert_eq!($f(&$e), $expect);)+ + }); } - fn cfg(&self) -> ast::CrateConfig { Vec::new() } - fn ident_of(&self, st: &str) -> ast::Ident { - parse::token::str_to_ident(st) - } - fn name_of(&self, st: &str) -> ast::Name { - parse::token::intern(st) - } - fn parse_sess(&self) -> &parse::ParseSess { self } -} -fn main() { - let cx = parse::new_parse_sess(); + let abc = quote_expr!(cx, 23); + check!(expr_to_string, abc, *quote_expr!(cx, $abc); "23"); + + let ty = quote_ty!(cx, isize); + check!(ty_to_string, ty, *quote_ty!(cx, $ty); "isize"); - assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23"); - assert_eq!(pprust::pat_to_string(&*quote_pat!(&cx, Some(_))), "Some(_)"); - assert_eq!(pprust::ty_to_string(&*quote_ty!(&cx, isize)), "isize"); + let item = quote_item!(cx, static x: $ty = 10;).unwrap(); + check!(item_to_string, item, quote_item!(cx, $item).unwrap(); "static x: isize = 10;"); - let arm = quote_arm!(&cx, (ref x, ref y) => (x, y),); - assert_eq!(pprust::arm_to_string(&arm), " (ref x, ref y) => (x, y),"); + let twenty: u16 = 20; + let stmt = quote_stmt!(cx, let x = $twenty;).unwrap(); + check!(stmt_to_string, stmt, *quote_stmt!(cx, $stmt).unwrap(); "let x = 20u16;"); - let attr = quote_attr!(&cx, #![cfg(foo = "bar")]); - assert_eq!(pprust::attr_to_string(&attr), "#![cfg(foo = \"bar\")]"); + let pat = quote_pat!(cx, Some(_)); + check!(pat_to_string, pat, *quote_pat!(cx, $pat); "Some(_)"); - let item = quote_item!(&cx, static x : isize = 10;).unwrap(); - assert_eq!(pprust::item_to_string(&*item), "static x: isize = 10;"); + let expr = quote_expr!(cx, (x, y)); + let arm = quote_arm!(cx, (ref x, ref y) => $expr,); + check!(arm_to_string, arm, quote_arm!(cx, $arm); " (ref x, ref y) => (x, y),"); - let stmt = quote_stmt!(&cx, let x = 20;).unwrap(); - assert_eq!(pprust::stmt_to_string(&*stmt), "let x = 20;"); + let attr = quote_attr!(cx, #![cfg(foo = "bar")]); + check!(attribute_to_string, attr, 
quote_attr!(cx, $attr); r#"#![cfg(foo = "bar")]"#); } From ea892dc70b1a773b85410716b5822cc814542ecc Mon Sep 17 00:00:00 2001 From: Geoffry Song Date: Sun, 8 Mar 2015 14:41:08 -0400 Subject: [PATCH 2/3] Remove remaining tests for hygiene-encoded identifiers. Such things no longer exist. --- src/test/auxiliary/macro_crate_test.rs | 26 ----------- .../macro-crate-cannot-read-embedded-ident.rs | 28 ------------ .../cannot-read-embedded-idents/Makefile | 28 ------------ .../create_and_compile.rs | 44 ------------------- 4 files changed, 126 deletions(-) delete mode 100644 src/test/compile-fail-fulldeps/macro-crate-cannot-read-embedded-ident.rs delete mode 100644 src/test/run-make/cannot-read-embedded-idents/Makefile delete mode 100644 src/test/run-make/cannot-read-embedded-idents/create_and_compile.rs diff --git a/src/test/auxiliary/macro_crate_test.rs b/src/test/auxiliary/macro_crate_test.rs index 5b7e52e9164e9..54aac5195aee3 100644 --- a/src/test/auxiliary/macro_crate_test.rs +++ b/src/test/auxiliary/macro_crate_test.rs @@ -32,7 +32,6 @@ macro_rules! unexported_macro { () => (3) } #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("make_a_1", expand_make_a_1); - reg.register_macro("forged_ident", expand_forged_ident); reg.register_macro("identity", expand_identity); reg.register_syntax_extension( token::intern("into_foo"), @@ -104,29 +103,4 @@ fn expand_into_foo_multi(cx: &mut ExtCtxt, } } -fn expand_forged_ident(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - use syntax::ext::quote::rt::*; - - if !tts.is_empty() { - cx.span_fatal(sp, "forged_ident takes no arguments"); - } - - // Most of this is modelled after the expansion of the `quote_expr!` - // macro ... - let parse_sess = cx.parse_sess(); - let cfg = cx.cfg(); - - // ... except this is where we inject a forged identifier, - // and deliberately do not call `cx.parse_tts_with_hygiene` - // (because we are testing that this will be *rejected* - // by the default parser). 
- - let expr = { - let tt = cx.parse_tts("\x00name_2,ctxt_0\x00".to_string()); - let mut parser = new_parser_from_tts(parse_sess, cfg, tt); - parser.parse_expr() - }; - MacEager::expr(expr) -} - pub fn foo() {} diff --git a/src/test/compile-fail-fulldeps/macro-crate-cannot-read-embedded-ident.rs b/src/test/compile-fail-fulldeps/macro-crate-cannot-read-embedded-ident.rs deleted file mode 100644 index fd1deffb59d4d..0000000000000 --- a/src/test/compile-fail-fulldeps/macro-crate-cannot-read-embedded-ident.rs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// aux-build:macro_crate_test.rs -// ignore-stage1 -// error-pattern: unknown start of token: \u{0} - -// Issue #15750 and #15962 : this test is checking that the standard -// parser rejects embedded idents. pnkfelix did not want to attempt -// to make a test file that itself used the embedded ident input form, -// since he worried that would be difficult to work with in many text -// editors, so instead he made a macro that expands into the embedded -// ident form. - -#![feature(plugin)] -#![plugin(macro_crate_test)] - -fn main() { - let x = 0; - assert_eq!(3, forged_ident!()); -} diff --git a/src/test/run-make/cannot-read-embedded-idents/Makefile b/src/test/run-make/cannot-read-embedded-idents/Makefile deleted file mode 100644 index 0d047be02ca1d..0000000000000 --- a/src/test/run-make/cannot-read-embedded-idents/Makefile +++ /dev/null @@ -1,28 +0,0 @@ --include ../tools.mk - -# Issue #15750, #15962 : This test ensures that our special embedded -# ident syntax hack is not treated as legitimate input by the lexer in -# normal mode. 
-# -# It is modelled after the `unicode-input/` test, since we need to -# create files with syntax that can trip up normal text editting tools -# (namely text with embedded nul-bytes). - -# This test attempts to run rustc itself from the compiled binary; but -# that means that you need to set the LD_LIBRARY_PATH for rustc itself -# while running create_and_compile, and that won't work for stage1. - -# FIXME ignore windows -ifndef IS_WINDOWS -ifeq ($(RUST_BUILD_STAGE),1) -DOTEST= -else -DOTEST=dotest -endif -endif - -all: $(DOTEST) - -dotest: - $(RUSTC) create_and_compile.rs - $(call RUN,create_and_compile) "$(RUSTC)" "$(TMPDIR)" diff --git a/src/test/run-make/cannot-read-embedded-idents/create_and_compile.rs b/src/test/run-make/cannot-read-embedded-idents/create_and_compile.rs deleted file mode 100644 index fd69d2786b8d0..0000000000000 --- a/src/test/run-make/cannot-read-embedded-idents/create_and_compile.rs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -use std::env; -use std::fs::File; -use std::process::Command; -use std::io::Write; -use std::path::Path; - -// creates broken.rs, which has the Ident \x00name_0,ctxt_0\x00 -// embedded within it, and then attempts to compile broken.rs with the -// provided `rustc` - -fn main() { - let args: Vec = env::args().collect(); - let rustc = &args[1]; - let tmpdir = Path::new(&args[2]); - - let main_file = tmpdir.join("broken.rs"); - let _ = File::create(&main_file).unwrap() - .write_all(b"pub fn main() { - let \x00name_0,ctxt_0\x00 = 3; - println!(\"{}\", \x00name_0,ctxt_0\x00); - }").unwrap(); - - // rustc is passed to us with --out-dir and -L etc., so we - // can't exec it directly - let result = Command::new("sh") - .arg("-c") - .arg(&format!("{} {}", rustc, main_file.display())) - .output().unwrap(); - let err = String::from_utf8_lossy(&result.stderr); - - // positive test so that this test will be updated when the - // compiler changes. - assert!(err.contains("unknown start of token")) -} From 24ef90527351bb1a52c8b54e948cdbba8db3eef6 Mon Sep 17 00:00:00 2001 From: Geoffry Song Date: Sun, 26 Apr 2015 01:09:36 -0400 Subject: [PATCH 3/3] Remove FakeExtCtxt from qquote tests. Instead create an ExtCtxt structure. 
--- src/test/compile-fail-fulldeps/qquote.rs | 43 +++++++++--------------- src/test/run-fail/qquote.rs | 43 +++++++++--------------- 2 files changed, 30 insertions(+), 56 deletions(-) diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index 8b410dc257249..cf68efe5855b7 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -15,38 +15,25 @@ extern crate syntax; use syntax::ast; -use syntax::codemap; +use syntax::codemap::{self, DUMMY_SP}; use syntax::parse; use syntax::print::pprust; -trait FakeExtCtxt { - fn call_site(&self) -> codemap::Span; - fn cfg(&self) -> ast::CrateConfig; - fn ident_of(&self, st: &str) -> ast::Ident; - fn name_of(&self, st: &str) -> ast::Name; - fn parse_sess(&self) -> &parse::ParseSess; -} - -impl FakeExtCtxt for parse::ParseSess { - fn call_site(&self) -> codemap::Span { - codemap::Span { - lo: codemap::BytePos(0), - hi: codemap::BytePos(0), - expn_id: codemap::NO_EXPANSION, - } - } - fn cfg(&self) -> ast::CrateConfig { Vec::new() } - fn ident_of(&self, st: &str) -> ast::Ident { - parse::token::str_to_ident(st) - } - fn name_of(&self, st: &str) -> ast::Name { - parse::token::intern(st) - } - fn parse_sess(&self) -> &parse::ParseSess { self } -} - fn main() { - let cx = parse::new_parse_sess(); + let ps = syntax::parse::new_parse_sess(); + let mut cx = syntax::ext::base::ExtCtxt::new( + &ps, vec![], + syntax::ext::expand::ExpansionConfig::default("qquote".to_string())); + cx.bt_push(syntax::codemap::ExpnInfo { + call_site: DUMMY_SP, + callee: syntax::codemap::NameAndSpan { + name: "".to_string(), + format: syntax::codemap::MacroBang, + allow_internal_unstable: false, + span: None, + } + }); + let cx = &mut cx; assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23"); diff --git a/src/test/run-fail/qquote.rs b/src/test/run-fail/qquote.rs index fe582bc9bf780..6ae22392b939e 100644 --- a/src/test/run-fail/qquote.rs +++ 
b/src/test/run-fail/qquote.rs @@ -17,38 +17,25 @@ extern crate syntax; use syntax::ast; -use syntax::codemap; +use syntax::codemap::{self, DUMMY_SP}; use syntax::parse; use syntax::print::pprust; -trait FakeExtCtxt { - fn call_site(&self) -> codemap::Span; - fn cfg(&self) -> ast::CrateConfig; - fn ident_of(&self, st: &str) -> ast::Ident; - fn name_of(&self, st: &str) -> ast::Name; - fn parse_sess(&self) -> &parse::ParseSess; -} - -impl FakeExtCtxt for parse::ParseSess { - fn call_site(&self) -> codemap::Span { - codemap::Span { - lo: codemap::BytePos(0), - hi: codemap::BytePos(0), - expn_id: codemap::NO_EXPANSION, - } - } - fn cfg(&self) -> ast::CrateConfig { Vec::new() } - fn ident_of(&self, st: &str) -> ast::Ident { - parse::token::str_to_ident(st) - } - fn name_of(&self, st: &str) -> ast::Name { - parse::token::intern(st) - } - fn parse_sess(&self) -> &parse::ParseSess { self } -} - fn main() { - let cx = parse::new_parse_sess(); + let ps = syntax::parse::new_parse_sess(); + let mut cx = syntax::ext::base::ExtCtxt::new( + &ps, vec![], + syntax::ext::expand::ExpansionConfig::default("qquote".to_string())); + cx.bt_push(syntax::codemap::ExpnInfo { + call_site: DUMMY_SP, + callee: syntax::codemap::NameAndSpan { + name: "".to_string(), + format: syntax::codemap::MacroBang, + allow_internal_unstable: false, + span: None, + } + }); + let cx = &mut cx; assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23");