Emit non-logical newlines for "empty" lines (#4444)
charliermarsh authored May 16, 2023
1 parent 8134ec2 commit f0465bf
Showing 11 changed files with 53 additions and 160 deletions.
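
Note: the upshot of this change is that the lexer now emits a Tok::NonLogicalNewline token for blank lines, so consumers can track line boundaries from the token stream alone instead of slicing the source through a Locator. A minimal sketch of how one might observe this, reusing the rustpython_parser items used elsewhere in this diff (lexer, Mode, Tok); the exact token sequence is illustrative, not guaranteed:

use rustpython_parser::{lexer, Mode, Tok};

fn main() {
    // Two statements separated by a blank line; the blank line should now
    // surface as a NonLogicalNewline token rather than disappearing.
    let contents = "x = 1\n\ny = 2\n";
    for (tok, range) in lexer::lex(contents, Mode::Module).flatten() {
        let label = match tok {
            Tok::Newline => "Newline",
            Tok::NonLogicalNewline => "NonLogicalNewline",
            _ => "other",
        };
        println!("{range:?}: {label}");
    }
}
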
12 changes: 6 additions & 6 deletions Cargo.lock

(Generated file; diff not rendered.)

8 changes: 4 additions & 4 deletions Cargo.toml
@@ -31,10 +31,10 @@ proc-macro2 = { version = "1.0.51" }
 quote = { version = "1.0.23" }
 regex = { version = "1.7.1" }
 rustc-hash = { version = "1.1.0" }
-ruff_text_size = { git = "https://github.com/RustPython/Parser.git", rev = "27e3873dc2a3b0d652cc106bc9ddcede4b01806d" }
-rustpython-format = { git = "https://github.com/RustPython/Parser.git", rev = "27e3873dc2a3b0d652cc106bc9ddcede4b01806d" }
-rustpython-literal = { git = "https://github.com/RustPython/Parser.git", rev = "27e3873dc2a3b0d652cc106bc9ddcede4b01806d" }
-rustpython-parser = { git = "https://github.com/RustPython/Parser.git", rev = "27e3873dc2a3b0d652cc106bc9ddcede4b01806d", default-features = false, features = ["full-lexer", "all-nodes-with-ranges"] }
+ruff_text_size = { git = "https://github.com/RustPython/Parser.git", rev = "735c06d5f43da05d191f32442224f082f1d202ee" }
+rustpython-format = { git = "https://github.com/RustPython/Parser.git", rev = "735c06d5f43da05d191f32442224f082f1d202ee" }
+rustpython-literal = { git = "https://github.com/RustPython/Parser.git", rev = "735c06d5f43da05d191f32442224f082f1d202ee" }
+rustpython-parser = { git = "https://github.com/RustPython/Parser.git", rev = "735c06d5f43da05d191f32442224f082f1d202ee", default-features = false, features = ["full-lexer", "all-nodes-with-ranges"] }
 schemars = { version = "0.8.12" }
 serde = { version = "1.0.152", features = ["derive"] }
 serde_json = { version = "1.0.93", features = ["preserve_order"] }
12 changes: 8 additions & 4 deletions crates/ruff/src/checkers/logical_lines.rs
@@ -168,7 +168,8 @@ mod tests {
         let contents = r#"
 x = 1
 y = 2
-z = x + 1"#;
+z = x + 1"#
+        .trim();
         let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
@@ -189,7 +190,8 @@ x = [
     3,
 ]
 y = 2
-z = x + 1"#;
+z = x + 1"#
+        .trim();
         let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
@@ -216,7 +218,8 @@ z = x + 1"#;
         let contents = r#"
 def f():
     x = 1
-f()"#;
+f()"#
+        .trim();
         let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
@@ -231,7 +234,8 @@ def f():
     """Docstring goes here."""
     # Comment goes here.
     x = 1
-f()"#;
+f()"#
+        .trim();
         let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
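
Presumably the new .trim() calls are needed because these raw-string fixtures start with a newline, which the lexer now reports as a token instead of silently dropping it, and which would otherwise show up as an extra empty logical line. A small check of that assumption, reusing the same crate APIs as the tests above:

use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, Mode, Tok};

fn main() {
    // Mirrors an untrimmed fixture: the source starts with a blank line.
    let contents = "\nx = 1\n";
    let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
    // If the leading newline is now lexed as a NonLogicalNewline token, the
    // tests must trim it away to keep their expected logical lines stable.
    let leading_newline = matches!(lxr.first(), Some(Ok((Tok::NonLogicalNewline, _))));
    println!("leading NonLogicalNewline token: {leading_newline}");
}
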
21 changes: 6 additions & 15 deletions crates/ruff/src/doc_lines.rs
@@ -3,7 +3,7 @@
 
 use std::iter::FusedIterator;
 
-use ruff_text_size::{TextRange, TextSize};
+use ruff_text_size::TextSize;
 use rustpython_parser::ast::{self, Constant, Expr, Ranged, Stmt, Suite};
 use rustpython_parser::lexer::LexResult;
 use rustpython_parser::Tok;
@@ -13,24 +13,19 @@ use ruff_python_ast::source_code::Locator
 use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
 
 /// Extract doc lines (standalone comments) from a token sequence.
-pub(crate) fn doc_lines_from_tokens<'a>(
-    lxr: &'a [LexResult],
-    locator: &'a Locator<'a>,
-) -> DocLines<'a> {
-    DocLines::new(lxr, locator)
+pub(crate) fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
+    DocLines::new(lxr)
 }
 
 pub(crate) struct DocLines<'a> {
     inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
-    locator: &'a Locator<'a>,
     prev: TextSize,
 }
 
 impl<'a> DocLines<'a> {
-    fn new(lxr: &'a [LexResult], locator: &'a Locator) -> Self {
+    fn new(lxr: &'a [LexResult]) -> Self {
         Self {
             inner: lxr.iter().flatten(),
-            locator,
             prev: TextSize::default(),
         }
     }
@@ -46,15 +41,11 @@ impl Iterator for DocLines<'_> {
 
             match tok {
                 Tok::Comment(..) => {
-                    if at_start_of_line
-                        || self
-                            .locator
-                            .contains_line_break(TextRange::new(self.prev, range.start()))
-                    {
+                    if at_start_of_line {
                         break Some(range.start());
                     }
                 }
-                Tok::Newline => {
+                Tok::Newline | Tok::NonLogicalNewline => {
                     at_start_of_line = true;
                 }
                 Tok::Indent | Tok::Dedent => {
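
Pieced together, the simplification is that a comment now counts as a doc line whenever the previous token was any kind of newline, so the Locator (and the TextRange import) are no longer needed. A rough, self-contained reconstruction of the post-change logic, not the exact file contents, and with an illustrative helper name:

use ruff_text_size::TextSize;
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

/// Collect the start offsets of standalone comments ("doc lines"), relying on
/// the lexer's Newline / NonLogicalNewline tokens to mark line boundaries.
fn doc_line_starts(lxr: &[LexResult]) -> Vec<TextSize> {
    let mut starts = Vec::new();
    let mut at_start_of_line = true;
    for (tok, range) in lxr.iter().flatten() {
        match tok {
            Tok::Comment(..) => {
                if at_start_of_line {
                    starts.push(range.start());
                }
            }
            // Both logical and non-logical newlines begin a new line.
            Tok::Newline | Tok::NonLogicalNewline => {
                at_start_of_line = true;
            }
            // Indentation tokens don't change "start of line" status.
            Tok::Indent | Tok::Dedent => {}
            _ => {
                at_start_of_line = false;
            }
        }
    }
    starts
}
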
2 changes: 1 addition & 1 deletion crates/ruff/src/linter.rs
@@ -88,7 +88,7 @@ pub fn check_path(
     let use_doc_lines = settings.rules.enabled(Rule::DocLineTooLong);
     let mut doc_lines = vec![];
     if use_doc_lines {
-        doc_lines.extend(doc_lines_from_tokens(&tokens, locator));
+        doc_lines.extend(doc_lines_from_tokens(&tokens));
     }
 
     // Run the token-based rules.
20 changes: 13 additions & 7 deletions crates/ruff/src/rules/flake8_todos/rules.rs
@@ -309,16 +309,22 @@ pub(crate) fn todos(tokens: &[LexResult], settings: &Settings) -> Vec<Diagnostic
         // TD003
         let mut has_issue_link = false;
         while let Some((token, token_range)) = iter.peek() {
-            if let Tok::Comment(comment) = token {
-                if detect_tag(comment, token_range.start()).is_some() {
-                    break;
+            match token {
+                Tok::Comment(comment) => {
+                    if detect_tag(comment, token_range.start()).is_some() {
+                        break;
+                    }
+                    if ISSUE_LINK_REGEX_SET.is_match(comment) {
+                        has_issue_link = true;
+                        break;
+                    }
+                }
+                Tok::Newline | Tok::NonLogicalNewline => {
+                    continue;
                 }
-                if ISSUE_LINK_REGEX_SET.is_match(comment) {
-                    has_issue_link = true;
+                _ => {
                     break;
                 }
-            } else {
-                break;
             }
         }
         if !has_issue_link {
21 changes: 1 addition & 20 deletions crates/ruff/src/rules/pycodestyle/rules/logical_lines/mod.rs
@@ -89,8 +89,7 @@ impl<'a> LogicalLines<'a> {
         let mut builder = LogicalLinesBuilder::with_capacity(tokens.len());
         let mut parens: u32 = 0;
 
-        let mut iter = tokens.iter().flatten().peekable();
-        while let Some((token, range)) = iter.next() {
+        for (token, range) in tokens.iter().flatten() {
             let token_kind = TokenKind::from_token(token);
             builder.push_token(token_kind, *range);
 
@@ -101,24 +100,6 @@ impl<'a> LogicalLines<'a> {
                 TokenKind::Rbrace | TokenKind::Rpar | TokenKind::Rsqb => {
                     parens -= 1;
                 }
-                TokenKind::Comment if parens == 0 => {
-                    // If a comment is followed by a newline, ignore it, and we'll build the line
-                    // when we process the newline. Otherwise, we'll end up creating one logical
-                    // line here, and then another, empty logical line when we process the newline.
-                    //
-                    // The lexer will always emit a newline after a comment _unless_ the comment
-                    // appears at the start of a logical line.
-                    if let Some((token, ..)) = iter.peek() {
-                        let token_kind = TokenKind::from_token(token);
-                        if matches!(
-                            token_kind,
-                            TokenKind::Newline | TokenKind::NonLogicalNewline
-                        ) {
-                            continue;
-                        }
-                    }
-                    builder.finish_line();
-                }
                 TokenKind::Newline | TokenKind::NonLogicalNewline if parens == 0 => {
                     builder.finish_line();
                 }
8 changes: 4 additions & 4 deletions crates/ruff_python_ast/src/source_code/indexer.rs
@@ -35,17 +35,17 @@ impl Indexer {
 
             // Get the trivia between the previous and the current token and detect any newlines.
             // This is necessary because `RustPython` doesn't emit `[Tok::Newline]` tokens
-            // between any two tokens that form a continuation nor multiple newlines in a row.
-            // That's why we have to extract the newlines "manually".
+            // between any two tokens that form a continuation. That's why we have to extract the
+            // newlines "manually".
             for (index, text) in trivia.match_indices(['\n', '\r']) {
                 if text == "\r" && trivia.as_bytes().get(index + 1) == Some(&b'\n') {
                     continue;
                 }
 
-                // Newlines after a comment or new-line never form a continuation.
+                // Newlines after a newline never form a continuation.
                 if !matches!(
                     prev_token,
-                    Some(Tok::Newline | Tok::NonLogicalNewline | Tok::Comment(..)) | None
+                    Some(Tok::Newline | Tok::NonLogicalNewline) | None
                 ) {
                     continuation_lines.push(line_start);
                 }
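
The updated rule, in words: a newline found in the trivia between two tokens marks a continuation unless the previous token was itself a newline token; the Tok::Comment carve-out goes away, presumably because comment-only lines are now followed by a NonLogicalNewline token, so their line break no longer appears as trivia. A simplified sketch of that check (illustrative helper name, and ignoring the line-start bookkeeping the real Indexer does):

use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

/// Return byte offsets of newline characters that act as line continuations:
/// newlines that appear only in the trivia between two tokens, where the
/// previous token was not itself a (non-)logical newline.
fn continuation_newline_offsets(lxr: &[LexResult], contents: &str) -> Vec<usize> {
    let mut offsets = Vec::new();
    let mut prev_token: Option<&Tok> = None;
    let mut prev_end = 0usize;
    for (tok, range) in lxr.iter().flatten() {
        let start = u32::from(range.start()) as usize;
        let trivia = &contents[prev_end..start];
        for (index, text) in trivia.match_indices(['\n', '\r']) {
            // Treat "\r\n" as a single newline: skip the '\r', handle the '\n'.
            if text == "\r" && trivia.as_bytes().get(index + 1) == Some(&b'\n') {
                continue;
            }
            // A newline right after a Newline/NonLogicalNewline token (or at
            // the start of the file) is not a continuation.
            if !matches!(prev_token, Some(Tok::Newline | Tok::NonLogicalNewline) | None) {
                offsets.push(prev_end + index);
            }
        }
        prev_token = Some(tok);
        prev_end = u32::from(range.end()) as usize;
    }
    offsets
}
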
2 changes: 1 addition & 1 deletion crates/ruff_python_formatter/src/lib.rs
@@ -28,7 +28,7 @@ pub fn fmt(contents: &str) -> Result<Formatted<ASTFormatContext>> {
     let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
 
     // Extract trivia.
-    let trivia = trivia::extract_trivia_tokens(&tokens, contents);
+    let trivia = trivia::extract_trivia_tokens(&tokens);
 
     // Parse the AST.
     let python_ast = ruff_rustpython::parse_program_tokens(tokens, "<filename>")?;
