Skip to content

Commit

Permalink
Auto merge of #36969 - nnethercote:rename-Parser-fields, r=eddyb
Browse files Browse the repository at this point in the history
Clarify the positions of the lexer and parser

The lexer and parser use unclear names to indicate their positions in the
source code. I propose the following renamings.

Lexer:
```
pos      -> next_pos      # it's actually the next pos!
last_pos -> pos           # it's actually the current pos!
curr     -> ch            # the current char
curr_is  -> ch_is         # tests the current char
col (unchanged)           # the current column
```
Parser:
```
- last_span       -> prev_span          # the previous token's span
- last_token_kind -> prev_token_kind    # the previous token's kind
- LastTokenKind   -> PrevTokenKind      # ditto (but the type)
- token (unchanged)                     # the current token
- span (unchanged)                      # the current span
```

Things to note:
- This proposal removes all uses of "last", which is an unclear word because it
  could mean (a) previous, (b) final, or (c) most recent, i.e. current.
- The "current" things (ch, col, token, span) consistently lack a prefix. The
  "previous" and "next" things consistently have a prefix.
  • Loading branch information
bors authored Oct 18, 2016
2 parents 1d3dfa5 + 94b3659 commit 3543a0f
Show file tree
Hide file tree
Showing 9 changed files with 414 additions and 415 deletions.
2 changes: 1 addition & 1 deletion src/librustc_metadata/creader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -582,7 +582,7 @@ impl<'a> CrateReader<'a> {
unreachable!();
}
};
let local_span = mk_sp(lo, p.last_span.hi);
let local_span = mk_sp(lo, p.prev_span.hi);

// Mark the attrs as used
for attr in &def.attrs {
Expand Down
10 changes: 5 additions & 5 deletions src/librustc_save_analysis/span_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,9 +139,9 @@ impl<'a> SpanUtils<'a> {
let mut prev = toks.real_token();
let mut result = None;
let mut bracket_count = 0;
let mut last_span = None;
let mut prev_span = None;
while prev.tok != token::Eof {
last_span = None;
prev_span = None;
let mut next = toks.real_token();

if (next.tok == token::OpenDelim(token::Paren) || next.tok == token::Lt) &&
Expand All @@ -166,12 +166,12 @@ impl<'a> SpanUtils<'a> {
};

if prev.tok.is_ident() && bracket_count == 0 {
last_span = Some(prev.sp);
prev_span = Some(prev.sp);
}
prev = next;
}
if result.is_none() && last_span.is_some() {
return self.make_sub_span(span, last_span);
if result.is_none() && prev_span.is_some() {
return self.make_sub_span(span, prev_span);
}
return self.make_sub_span(span, result);
}
Expand Down
6 changes: 3 additions & 3 deletions src/libsyntax/codemap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -804,7 +804,7 @@ impl CodeMap {
}

pub fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace> {
let mut last_span = DUMMY_SP;
let mut prev_span = DUMMY_SP;
let mut span = span;
let mut result = vec![];
loop {
Expand All @@ -827,14 +827,14 @@ impl CodeMap {
None => break,
Some((call_site, macro_decl_name, def_site_span)) => {
// Don't print recursive invocations
if !call_site.source_equal(&last_span) {
if !call_site.source_equal(&prev_span) {
result.push(MacroBacktrace {
call_site: call_site,
macro_decl_name: macro_decl_name,
def_site_span: def_site_span,
});
}
last_span = span;
prev_span = span;
span = call_site;
}
}
Expand Down
12 changes: 6 additions & 6 deletions src/libsyntax/parse/attr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ impl<'a> Parser<'a> {
self.expect(&token::OpenDelim(token::Bracket))?;
let meta_item = self.parse_meta_item()?;
self.expect(&token::CloseDelim(token::Bracket))?;
let hi = self.last_span.hi;
let hi = self.prev_span.hi;

(mk_sp(lo, hi), meta_item, style)
}
Expand Down Expand Up @@ -231,16 +231,16 @@ impl<'a> Parser<'a> {
token::Eq => {
self.bump();
let lit = self.parse_unsuffixed_lit()?;
let hi = self.last_span.hi;
let hi = self.prev_span.hi;
Ok(P(spanned(lo, hi, ast::MetaItemKind::NameValue(name, lit))))
}
token::OpenDelim(token::Paren) => {
let inner_items = self.parse_meta_seq()?;
let hi = self.last_span.hi;
let hi = self.prev_span.hi;
Ok(P(spanned(lo, hi, ast::MetaItemKind::List(name, inner_items))))
}
_ => {
let hi = self.last_span.hi;
let hi = self.prev_span.hi;
Ok(P(spanned(lo, hi, ast::MetaItemKind::Word(name))))
}
}
Expand All @@ -253,14 +253,14 @@ impl<'a> Parser<'a> {

match self.parse_unsuffixed_lit() {
Ok(lit) => {
return Ok(spanned(lo, self.last_span.hi, ast::NestedMetaItemKind::Literal(lit)))
return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::Literal(lit)))
}
Err(ref mut err) => self.diagnostic().cancel(err)
}

match self.parse_meta_item() {
Ok(mi) => {
return Ok(spanned(lo, self.last_span.hi, ast::NestedMetaItemKind::MetaItem(mi)))
return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::MetaItem(mi)))
}
Err(ref mut err) => self.diagnostic().cancel(err)
}
Expand Down
40 changes: 20 additions & 20 deletions src/libsyntax/parse/lexer/comments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,13 +149,13 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
comments.push(Comment {
style: BlankLine,
lines: Vec::new(),
pos: rdr.last_pos,
pos: rdr.pos,
});
}

fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec<Comment>) {
while is_pattern_whitespace(rdr.curr) && !rdr.is_eof() {
if rdr.col == CharPos(0) && rdr.curr_is('\n') {
while is_pattern_whitespace(rdr.ch) && !rdr.is_eof() {
if rdr.col == CharPos(0) && rdr.ch_is('\n') {
push_blank_line_comment(rdr, &mut *comments);
}
rdr.bump();
Expand All @@ -167,7 +167,7 @@ fn read_shebang_comment(rdr: &mut StringReader,
code_to_the_left: bool,
comments: &mut Vec<Comment>) {
debug!(">>> shebang comment");
let p = rdr.last_pos;
let p = rdr.pos;
debug!("<<< shebang comment");
comments.push(Comment {
style: if code_to_the_left { Trailing } else { Isolated },
Expand All @@ -180,9 +180,9 @@ fn read_line_comments(rdr: &mut StringReader,
code_to_the_left: bool,
comments: &mut Vec<Comment>) {
debug!(">>> line comments");
let p = rdr.last_pos;
let p = rdr.pos;
let mut lines: Vec<String> = Vec::new();
while rdr.curr_is('/') && rdr.nextch_is('/') {
while rdr.ch_is('/') && rdr.nextch_is('/') {
let line = rdr.read_one_line_comment();
debug!("{}", line);
// Doc comments are not put in comments.
Expand Down Expand Up @@ -240,7 +240,7 @@ fn read_block_comment(rdr: &mut StringReader,
code_to_the_left: bool,
comments: &mut Vec<Comment>) {
debug!(">>> block comment");
let p = rdr.last_pos;
let p = rdr.pos;
let mut lines: Vec<String> = Vec::new();
let col = rdr.col;
rdr.bump();
Expand All @@ -249,9 +249,9 @@ fn read_block_comment(rdr: &mut StringReader,
let mut curr_line = String::from("/*");

// doc-comments are not really comments, they are attributes
if (rdr.curr_is('*') && !rdr.nextch_is('*')) || rdr.curr_is('!') {
while !(rdr.curr_is('*') && rdr.nextch_is('/')) && !rdr.is_eof() {
curr_line.push(rdr.curr.unwrap());
if (rdr.ch_is('*') && !rdr.nextch_is('*')) || rdr.ch_is('!') {
while !(rdr.ch_is('*') && rdr.nextch_is('/')) && !rdr.is_eof() {
curr_line.push(rdr.ch.unwrap());
rdr.bump();
}
if !rdr.is_eof() {
Expand All @@ -271,19 +271,19 @@ fn read_block_comment(rdr: &mut StringReader,
if rdr.is_eof() {
panic!(rdr.fatal("unterminated block comment"));
}
if rdr.curr_is('\n') {
if rdr.ch_is('\n') {
trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);
curr_line = String::new();
rdr.bump();
} else {
curr_line.push(rdr.curr.unwrap());
if rdr.curr_is('/') && rdr.nextch_is('*') {
curr_line.push(rdr.ch.unwrap());
if rdr.ch_is('/') && rdr.nextch_is('*') {
rdr.bump();
rdr.bump();
curr_line.push('*');
level += 1;
} else {
if rdr.curr_is('*') && rdr.nextch_is('/') {
if rdr.ch_is('*') && rdr.nextch_is('/') {
rdr.bump();
rdr.bump();
curr_line.push('/');
Expand All @@ -305,7 +305,7 @@ fn read_block_comment(rdr: &mut StringReader,
Isolated
};
rdr.consume_non_eol_whitespace();
if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1 {
if !rdr.is_eof() && !rdr.ch_is('\n') && lines.len() == 1 {
style = Mixed;
}
debug!("<<< block comment");
Expand All @@ -319,11 +319,11 @@ fn read_block_comment(rdr: &mut StringReader,

fn consume_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec<Comment>) {
debug!(">>> consume comment");
if rdr.curr_is('/') && rdr.nextch_is('/') {
if rdr.ch_is('/') && rdr.nextch_is('/') {
read_line_comments(rdr, code_to_the_left, comments);
} else if rdr.curr_is('/') && rdr.nextch_is('*') {
} else if rdr.ch_is('/') && rdr.nextch_is('*') {
read_block_comment(rdr, code_to_the_left, comments);
} else if rdr.curr_is('#') && rdr.nextch_is('!') {
} else if rdr.ch_is('#') && rdr.nextch_is('!') {
read_shebang_comment(rdr, code_to_the_left, comments);
} else {
panic!();
Expand Down Expand Up @@ -357,7 +357,7 @@ pub fn gather_comments_and_literals(span_diagnostic: &errors::Handler,
loop {
let mut code_to_the_left = !first_read;
rdr.consume_non_eol_whitespace();
if rdr.curr_is('\n') {
if rdr.ch_is('\n') {
code_to_the_left = false;
consume_whitespace_counting_blank_lines(&mut rdr, &mut comments);
}
Expand All @@ -369,7 +369,7 @@ pub fn gather_comments_and_literals(span_diagnostic: &errors::Handler,
}


let bstart = rdr.last_pos;
let bstart = rdr.pos;
rdr.next_token();
// discard, and look ahead; we're working with internal state
let TokenAndSpan { tok, sp } = rdr.peek();
Expand Down
Loading

0 comments on commit 3543a0f

Please sign in to comment.