Move jointness info from TokenStream to Token
matklad committed Sep 26, 2019
1 parent b7820b2 commit 42f0795
Showing 13 changed files with 124 additions and 121 deletions.
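
The shape of the change, as it can be pieced together from the hunks below: jointness used to travel next to each tree in the stream as a `(TokenTree, IsJoint)` pair (the `TreeAndJoint` alias this commit drops from the imports), and now it is carried by `Token` itself and set through a `with_joint` builder. A minimal, self-contained sketch of that before/after data model follows; the stub `TokenKind` and `Span` types stand in for the real libsyntax definitions, which are not part of this diff, and the method bodies are assumptions.

```rust
// Stand-ins for the real libsyntax types, only so the sketch compiles on its own.
#[derive(Clone, Debug)]
pub struct TokenKind; // really an enum: Ident, ModSep, Dollar, Eq, ...
#[derive(Copy, Clone, Debug)]
pub struct Span;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum IsJoint {
    Joint,    // no gap between this token and the next operator token
    NonJoint, // whitespace or a non-operator follows
}

// Before this commit, jointness lived in the stream alongside each tree:
//     type TreeAndJoint = (TokenTree, IsJoint);
//     TokenStream::new(vec![(tree, NonJoint)]);
// After it, the stream holds plain token trees and the flag is a Token field:
#[derive(Clone, Debug)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
    pub joint: IsJoint,
}

impl Token {
    pub fn new(kind: TokenKind, span: Span) -> Token {
        Token { kind, span, joint: IsJoint::NonJoint }
    }

    // Builder seen in the proc_macro_server hunk below; this body is an assumption.
    pub fn with_joint(mut self, joint: IsJoint) -> Token {
        self.joint = joint;
        self
    }
}

fn main() {
    let plus = Token::new(TokenKind, Span).with_joint(IsJoint::Joint);
    assert_eq!(plus.joint, IsJoint::Joint);
}
```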
11 changes: 10 additions & 1 deletion src/librustc/ich/impls_syntax.rs
@@ -349,9 +349,18 @@ impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
}
}

impl<'a> HashStable<StableHashingContext<'a>> for token::IsJoint {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
}
}

impl_stable_hash_for!(struct token::Token {
kind,
span
span,
joint
});

impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
20 changes: 10 additions & 10 deletions src/libsyntax/attr/mod.rs
@@ -478,12 +478,12 @@ impl MetaItem {
let mod_sep_span = Span::new(last_pos,
segment.ident.span.lo(),
segment.ident.span.ctxt());
idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
idents.push(TokenTree::token(token::ModSep, mod_sep_span));
}
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)));
last_pos = segment.ident.span.hi();
}
self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
self.node.tokens(self.span).append_to(&mut idents);
TokenStream::new(idents)
}

@@ -492,8 +492,8 @@ impl MetaItem {
{
// FIXME: Share code with `parse_path`.
let path = match tokens.next() {
Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span, .. })) |
Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span, .. })) => 'arm: {
let mut segments = if let token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
= tokens.peek() {
@@ -506,7 +506,7 @@ impl MetaItem {
vec![PathSegment::path_root(span)]
};
loop {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span, .. }))
= tokens.next() {
segments.push(PathSegment::from_ident(Ident::new(name, span)));
} else {
@@ -547,17 +547,17 @@ impl MetaItemKind {
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::NameValue(ref lit) => {
let mut vec = vec![TokenTree::token(token::Eq, span).into()];
lit.tokens().append_to_tree_and_joint_vec(&mut vec);
let mut vec = vec![TokenTree::token(token::Eq, span)];
lit.tokens().append_to(&mut vec);
TokenStream::new(vec)
}
MetaItemKind::List(ref list) => {
let mut tokens = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tokens.push(TokenTree::token(token::Comma, span).into());
tokens.push(TokenTree::token(token::Comma, span));
}
item.tokens().append_to_tree_and_joint_vec(&mut tokens);
item.tokens().append_to(&mut tokens);
}
TokenTree::Delimited(
DelimSpan::from_single(span),
6 changes: 4 additions & 2 deletions src/libsyntax/ext/mbe/quoted.rs
@@ -66,7 +66,7 @@ pub(super) fn parse(
match tree {
TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
let span = match trees.next() {
Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span, .. })) => {
match trees.next() {
Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
Some((kind, _)) => {
@@ -120,7 +120,9 @@ fn parse_tree(
// Depending on what `tree` is, we could be parsing different parts of a macro
match tree {
// `tree` is a `$` token. Look at the next token in `trees`
tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
tokenstream::TokenTree::Token(Token {
kind: token::Dollar, span, ..
}) => match trees.next() {
// `tree` is followed by a delimited set of token trees. This indicates the beginning
// of a repetition sequence in the macro (e.g. `$(pat)*`).
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
16 changes: 8 additions & 8 deletions src/libsyntax/ext/mbe/transcribe.rs
@@ -4,7 +4,7 @@ use crate::ext::mbe;
use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
use crate::mut_visit::{self, MutVisitor};
use crate::parse::token::{self, NtTT, Token};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};

use smallvec::{smallvec, SmallVec};

@@ -118,7 +118,7 @@ pub(super) fn transcribe(
//
// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
// again, and we are done transcribing.
let mut result: Vec<TreeAndJoint> = Vec::new();
let mut result: Vec<TokenTree> = Vec::new();
let mut result_stack = Vec::new();
let mut marker = Marker(cx.current_expansion.id, transparency);

@@ -138,7 +138,7 @@ pub(super) fn transcribe(
if repeat_idx < repeat_len {
*idx = 0;
if let Some(sep) = sep {
result.push(TokenTree::Token(sep.clone()).into());
result.push(TokenTree::Token(sep.clone()));
}
continue;
}
@@ -241,11 +241,11 @@ pub(super) fn transcribe(
// (e.g. `$x:tt`), but not when we are matching any other type of token
// tree?
if let NtTT(ref tt) = **nt {
result.push(tt.clone().into());
result.push(tt.clone());
} else {
marker.visit_span(&mut sp);
let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
result.push(token.into());
result.push(token);
}
} else {
// We were unable to descend far enough. This is an error.
@@ -259,8 +259,8 @@ pub(super) fn transcribe(
// with modified syntax context. (I believe this supports nested macros).
marker.visit_span(&mut sp);
marker.visit_ident(&mut ident);
result.push(TokenTree::token(token::Dollar, sp).into());
result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
result.push(TokenTree::token(token::Dollar, sp));
result.push(TokenTree::Token(Token::from_ast_ident(ident)));
}
}

@@ -280,7 +280,7 @@ pub(super) fn transcribe(
mbe::TokenTree::Token(token) => {
let mut tt = TokenTree::Token(token);
marker.visit_tt(&mut tt);
result.push(tt.into());
result.push(tt);
}

// There should be no meta-var declarations in the invocation of a macro.
19 changes: 11 additions & 8 deletions src/libsyntax/ext/proc_macro_server.rs
@@ -2,7 +2,7 @@ use crate::ast;
use crate::ext::base::ExtCtxt;
use crate::parse::{self, token, ParseSess};
use crate::parse::lexer::comments;
use crate::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use crate::tokenstream::{self, DelimSpan, TokenStream};

use errors::Diagnostic;
use rustc_data_structures::sync::Lrc;
@@ -44,15 +44,14 @@ impl ToInternal<token::DelimToken> for Delimiter {
}
}

impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
impl FromInternal<(tokenstream::TokenTree, &'_ ParseSess, &'_ mut Vec<Self>)>
for TokenTree<Group, Punct, Ident, Literal>
{
fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
fn from_internal((tree, sess, stack): (tokenstream::TokenTree, &ParseSess, &mut Vec<Self>))
-> Self {
use crate::parse::token::*;

let joint = is_joint == Joint;
let Token { kind, span } = match tree {
let Token { kind, span, joint } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group {
@@ -63,6 +62,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
}
tokenstream::TokenTree::Token(token) => token,
};
let joint = joint == Joint;

macro_rules! tt {
($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
@@ -262,8 +262,11 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
_ => unreachable!(),
};

let tree = tokenstream::TokenTree::token(kind, span);
TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
let token = Token::new(kind, span)
.with_joint(if joint { Joint } else { NonJoint });
let tree = tokenstream::TokenTree::Token(token);

TokenStream::new(vec![(tree)])
}
}

@@ -440,7 +443,7 @@ impl server::TokenStreamIter for Rustc<'_> {
) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
loop {
let tree = iter.stack.pop().or_else(|| {
let next = iter.cursor.next_with_joint()?;
let next = iter.cursor.next()?;
Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
})?;
// HACK: The condition "dummy span + group with empty delimiter" represents an AST
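
For context that is not visible in these hunks: the `let joint = joint == Joint;` conversion above feeds the spacing that procedural macros observe on punctuation. Below is a freestanding sketch of that mapping, assuming the usual `Joint`/`Alone` semantics of the public proc_macro `Spacing` type; the enum definitions and the conversion function here are illustrative stand-ins, not the real server code.

```rust
// IsJoint on the compiler side, Spacing on the proc-macro side.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum IsJoint { Joint, NonJoint }

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum Spacing { Joint, Alone }

// A token marked Joint is reported as punctuation that can be glued to its
// neighbour; anything else is reported as standing alone.
fn spacing(joint: IsJoint) -> Spacing {
    match joint {
        IsJoint::Joint => Spacing::Joint,
        IsJoint::NonJoint => Spacing::Alone,
    }
}

fn main() {
    // e.g. `+=` reaches a macro as '+' with Joint spacing followed by '=' with Alone.
    assert_eq!(spacing(IsJoint::Joint), Spacing::Joint);
    assert_eq!(spacing(IsJoint::NonJoint), Spacing::Alone);
}
```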
4 changes: 2 additions & 2 deletions src/libsyntax/mut_visit.rs
@@ -611,15 +611,15 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
visit_opt(tts, |tts| {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
visit_vec(tts, |tree| vis.visit_tt(tree));
})
}

// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
// In practice the ident part is not actually used by specific visitors right now,
// but there's a test below checking that it works.
pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
let Token { kind, span } = t;
let Token { kind, span, .. } = t;
match kind {
token::Ident(name, _) | token::Lifetime(name) => {
let mut ident = Ident::new(*name, *span);
24 changes: 12 additions & 12 deletions src/libsyntax/parse/lexer/tokentrees.rs
@@ -2,9 +2,9 @@ use syntax_pos::Span;

use crate::print::pprust::token_to_string;
use crate::parse::lexer::{StringReader, UnmatchedBrace};
use crate::parse::token::{self, Token};
use crate::parse::token::{self, Token, IsJoint::{self, *}};
use crate::parse::PResult;
use crate::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};

impl<'a> StringReader<'a> {
crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
@@ -67,7 +67,7 @@ impl<'a> TokenTreesReader<'a> {
}
}

fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
let sm = self.string_reader.sess.source_map();
match self.token.kind {
token::Eof => {
@@ -191,7 +191,7 @@ impl<'a> TokenTreesReader<'a> {
delim_span,
delim,
tts.into()
).into())
))
},
token::CloseDelim(_) => {
// An unexpected closing delimiter (i.e., there is no
@@ -204,10 +204,10 @@ impl<'a> TokenTreesReader<'a> {
Err(err)
},
_ => {
let tt = TokenTree::Token(self.token.take());
let token = self.token.take();
self.real_token();
let is_joint = self.joint_to_prev == Joint && self.token.is_op();
Ok((tt, if is_joint { Joint } else { NonJoint }))
let is_joint = self.joint_to_prev == Joint && token.is_op() && self.token.is_op();
Ok(TokenTree::Token(token.with_joint(if is_joint { Joint } else { NonJoint })))
}
}
}
@@ -231,21 +231,21 @@ impl<'a> TokenTreesReader<'a> {

#[derive(Default)]
struct TokenStreamBuilder {
buf: Vec<TreeAndJoint>,
buf: Vec<TokenTree>,
}

impl TokenStreamBuilder {
fn push(&mut self, (tree, joint): TreeAndJoint) {
if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() {
fn push(&mut self, tree: TokenTree) {
if let Some(TokenTree::Token(prev_token)) = self.buf.last() {
if let TokenTree::Token(token) = &tree {
if let Some(glued) = prev_token.glue(token) {
self.buf.pop();
self.buf.push((TokenTree::Token(glued), joint));
self.buf.push(TokenTree::Token(glued));
return;
}
}
}
self.buf.push((tree, joint))
self.buf.push(tree)
}

fn into_token_stream(self) -> TokenStream {
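
The lexer hunk above is where the flag now gets computed: a token comes out `Joint` only when nothing (no whitespace or comment) separated it from the following token and both tokens are operators; otherwise it is `NonJoint`. Below is a toy model of that condition, with made-up names in place of the real `StringReader`/`TokenTreesReader` machinery.

```rust
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum IsJoint { Joint, NonJoint }

// Hypothetical, simplified view of what the lexer knows about each token.
#[derive(Clone, Debug)]
struct RawToken {
    text: &'static str,
    is_op: bool,
    // Whether this token directly abuts the previous one (no whitespace or
    // comment in between) -- the `joint_to_prev` state kept by the reader.
    joint_to_prev: IsJoint,
}

// Mirrors the condition in `parse_token_tree` above:
//     self.joint_to_prev == Joint && token.is_op() && self.token.is_op()
fn jointness(current: &RawToken, next: Option<&RawToken>) -> IsJoint {
    match next {
        Some(next)
            if next.joint_to_prev == IsJoint::Joint && current.is_op && next.is_op =>
        {
            IsJoint::Joint
        }
        _ => IsJoint::NonJoint,
    }
}

fn main() {
    // `>>` lexes as two `>` tokens with nothing in between: the first is Joint.
    let gt1 = RawToken { text: ">", is_op: true, joint_to_prev: IsJoint::NonJoint };
    let gt2 = RawToken { text: ">", is_op: true, joint_to_prev: IsJoint::Joint };
    assert_eq!(jointness(&gt1, Some(&gt2)), IsJoint::Joint);

    // `> x`: whitespace before `x`, and `x` is not an operator, so `>` is NonJoint.
    let x = RawToken { text: "x", is_op: false, joint_to_prev: IsJoint::NonJoint };
    assert_eq!(jointness(&gt1, Some(&x)), IsJoint::NonJoint);

    println!("{}{} -> {:?}", gt1.text, gt2.text, jointness(&gt1, Some(&gt2)));
}
```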
14 changes: 7 additions & 7 deletions src/libsyntax/parse/parser.rs
@@ -24,7 +24,7 @@ use crate::print::pprust;
use crate::ptr::P;
use crate::source_map::{self, respan};
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream};
use crate::ThinVec;

use errors::{Applicability, DiagnosticId, FatalError};
@@ -195,8 +195,8 @@ struct TokenCursorFrame {
/// on the parser.
#[derive(Clone)]
crate enum LastToken {
Collecting(Vec<TreeAndJoint>),
Was(Option<TreeAndJoint>),
Collecting(Vec<TokenTree>),
Was(Option<TokenTree>),
}

impl TokenCursorFrame {
@@ -231,8 +231,8 @@ impl TokenCursor {
};

match self.frame.last_token {
LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
LastToken::Collecting(ref mut v) => v.push(tree.clone()),
LastToken::Was(ref mut t) => *t = Some(tree.clone()),
}

match tree {
@@ -247,7 +247,7 @@

fn next_desugared(&mut self) -> Token {
let (name, sp) = match self.next() {
Token { kind: token::DocComment(name), span } => (name, span),
Token { kind: token::DocComment(name), span, .. } => (name, span),
tok => return tok,
};

@@ -1173,7 +1173,7 @@ impl<'a> Parser<'a> {
loop {
match self.token.kind {
token::Eof | token::CloseDelim(..) => break,
_ => result.push(self.parse_token_tree().into()),
_ => result.push(self.parse_token_tree()),
}
}
TokenStream::new(result)
(The diffs for the remaining 5 changed files were not loaded and are not shown above.)