Emit non-logical newlines for "empty" lines
charliermarsh committed May 15, 2023
1 parent 8f3f8d3 commit 164f321
Showing 9 changed files with 41 additions and 133 deletions.
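
The common thread across the files below: the pinned parser revision (bumped in Cargo.toml) now emits a `Tok::NonLogicalNewline` token for "empty" (blank) lines, so the passes touched here can recognize empty lines and line boundaries from the token stream alone instead of re-slicing the source text. A minimal sketch of the idea — not part of the commit, assuming only the `LexResult`/`Tok` shapes visible in the diffs below:

use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

/// Count blank lines purely from tokens: a NonLogicalNewline that directly
/// follows another newline token terminates a line that held no tokens.
fn count_empty_lines(lxr: &[LexResult]) -> usize {
    let mut prev_was_newline = true; // the start of the file acts like a fresh line
    let mut empty = 0;
    for (tok, _range) in lxr.iter().flatten() {
        if prev_was_newline && matches!(tok, Tok::NonLogicalNewline) {
            empty += 1;
        }
        prev_was_newline = matches!(tok, Tok::Newline | Tok::NonLogicalNewline);
    }
    empty
}
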
12 changes: 6 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions Cargo.toml
@@ -31,10 +31,10 @@ proc-macro2 = { version = "1.0.51" }
 quote = { version = "1.0.23" }
 regex = { version = "1.7.1" }
 rustc-hash = { version = "1.1.0" }
-ruff_text_size = { git = "https://github.com/RustPython/Parser.git", rev = "a983f4383fb1ad8c1c66acb1d5b0016e59f95a49" }
-rustpython-format = { git = "https://github.com/RustPython/Parser.git", rev = "a983f4383fb1ad8c1c66acb1d5b0016e59f95a49" }
-rustpython-literal = { git = "https://github.com/RustPython/Parser.git", rev = "a983f4383fb1ad8c1c66acb1d5b0016e59f95a49" }
-rustpython-parser = { git = "https://github.com/RustPython/Parser.git", rev = "a983f4383fb1ad8c1c66acb1d5b0016e59f95a49" , default-features = false}
+ruff_text_size = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "66ccbc8d00a1ff81f4fe72bbcfe789a429453aeb" }
+rustpython-format = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "66ccbc8d00a1ff81f4fe72bbcfe789a429453aeb" }
+rustpython-literal = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "66ccbc8d00a1ff81f4fe72bbcfe789a429453aeb" }
+rustpython-parser = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "66ccbc8d00a1ff81f4fe72bbcfe789a429453aeb" , default-features = false}
 schemars = { version = "0.8.12" }
 serde = { version = "1.0.152", features = ["derive"] }
 serde_json = { version = "1.0.93", features = ["preserve_order"] }
Expand Down
21 changes: 6 additions & 15 deletions crates/ruff/src/doc_lines.rs
@@ -3,7 +3,7 @@

 use std::iter::FusedIterator;

-use ruff_text_size::{TextRange, TextSize};
+use ruff_text_size::TextSize;
 use rustpython_parser::ast::{self, Constant, ExprKind, Stmt, StmtKind, Suite};
 use rustpython_parser::lexer::LexResult;
 use rustpython_parser::Tok;
@@ -13,24 +13,19 @@ use ruff_python_ast::source_code::Locator;
 use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};

 /// Extract doc lines (standalone comments) from a token sequence.
-pub(crate) fn doc_lines_from_tokens<'a>(
-    lxr: &'a [LexResult],
-    locator: &'a Locator<'a>,
-) -> DocLines<'a> {
-    DocLines::new(lxr, locator)
+pub(crate) fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
+    DocLines::new(lxr)
 }

 pub(crate) struct DocLines<'a> {
     inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
-    locator: &'a Locator<'a>,
     prev: TextSize,
 }

 impl<'a> DocLines<'a> {
-    fn new(lxr: &'a [LexResult], locator: &'a Locator) -> Self {
+    fn new(lxr: &'a [LexResult]) -> Self {
         Self {
             inner: lxr.iter().flatten(),
-            locator,
             prev: TextSize::default(),
         }
     }
@@ -46,15 +41,11 @@ impl Iterator for DocLines<'_> {

             match tok {
                 Tok::Comment(..) => {
-                    if at_start_of_line
-                        || self
-                            .locator
-                            .contains_line_break(TextRange::new(self.prev, range.start()))
-                    {
+                    if at_start_of_line {
                         break Some(range.start());
                     }
                 }
-                Tok::Newline => {
+                Tok::Newline | Tok::NonLogicalNewline => {
                     at_start_of_line = true;
                 }
                 Tok::Indent | Tok::Dedent => {
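
With blank lines now surfacing as `Tok::NonLogicalNewline`, `DocLines` can tell whether a comment starts its own line from the tokens themselves, which is why the `Locator` parameter and the `contains_line_break` fallback are dropped. A standalone sketch of the same bookkeeping — not the commit's code, just an illustration over the types imported above:

use ruff_text_size::TextSize;
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

/// Offsets of comments that begin their own line, tracked from tokens only.
fn standalone_comment_starts(lxr: &[LexResult]) -> Vec<TextSize> {
    let mut starts = Vec::new();
    let mut at_start_of_line = true;
    for (tok, range) in lxr.iter().flatten() {
        match tok {
            Tok::Comment(..) if at_start_of_line => starts.push(range.start()),
            Tok::Comment(..) => {}
            // Logical and non-logical newlines both reset the flag; the latter
            // is what this commit makes available for blank lines.
            Tok::Newline | Tok::NonLogicalNewline => at_start_of_line = true,
            // Indentation tokens don't affect whether we're at the line start.
            Tok::Indent | Tok::Dedent => {}
            _ => at_start_of_line = false,
        }
    }
    starts
}
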
2 changes: 1 addition & 1 deletion crates/ruff/src/linter.rs
@@ -88,7 +88,7 @@ pub fn check_path(
     let use_doc_lines = settings.rules.enabled(Rule::DocLineTooLong);
     let mut doc_lines = vec![];
     if use_doc_lines {
-        doc_lines.extend(doc_lines_from_tokens(&tokens, locator));
+        doc_lines.extend(doc_lines_from_tokens(&tokens));
     }

     // Run the token-based rules.
20 changes: 13 additions & 7 deletions crates/ruff/src/rules/flake8_todos/rules.rs
@@ -309,16 +309,22 @@ pub(crate) fn todos(tokens: &[LexResult], settings: &Settings) -> Vec<Diagnostic
     // TD003
     let mut has_issue_link = false;
     while let Some((token, token_range)) = iter.peek() {
-        if let Tok::Comment(comment) = token {
-            if detect_tag(comment, token_range.start()).is_some() {
-                break;
+        match token {
+            Tok::Comment(comment) => {
+                if detect_tag(comment, token_range.start()).is_some() {
+                    break;
+                }
+                if ISSUE_LINK_REGEX_SET.is_match(comment) {
+                    has_issue_link = true;
+                    break;
+                }
             }
-            if ISSUE_LINK_REGEX_SET.is_match(comment) {
-                has_issue_link = true;
+            Tok::Newline | Tok::NonLogicalNewline => {
+                continue;
+            }
+            _ => {
                 break;
             }
-        } else {
-            break;
         }
     }
     if !has_issue_link {
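
The TD003 scan walks the comments that follow a TODO looking for an issue link; because blank lines inside that comment block now show up as `NonLogicalNewline` tokens, the scan has to skip newline tokens rather than bail out at the first one. A simplified sketch of that rule over a plain token slice — not the commit's code; `looks_like_issue_link` stands in for `ISSUE_LINK_REGEX_SET`, and the real rule also re-checks `detect_tag`:

use ruff_text_size::TextRange;
use rustpython_parser::Tok;

fn looks_like_issue_link(comment: &str) -> bool {
    // Stand-in heuristic for the real regex set.
    comment.contains("http://") || comment.contains("https://")
}

/// Does the comment block that follows a TODO contain an issue link?
fn following_comments_have_issue_link(rest: &[(Tok, TextRange)]) -> bool {
    for (tok, _range) in rest {
        match tok {
            Tok::Comment(text) => {
                if looks_like_issue_link(text) {
                    return true;
                }
            }
            // Line breaks (including the blank-line NonLogicalNewline tokens)
            // do not end the comment block.
            Tok::Newline | Tok::NonLogicalNewline => continue,
            // Any other token means the block is over.
            _ => return false,
        }
    }
    false
}
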
2 changes: 1 addition & 1 deletion crates/ruff_python_ast/src/source_code/indexer.rs
@@ -45,7 +45,7 @@ impl Indexer {
                 // Newlines after a comment or new-line never form a continuation.
                 if !matches!(
                     prev_token,
-                    Some(Tok::Newline | Tok::NonLogicalNewline | Tok::Comment(..)) | None
+                    Some(Tok::Newline | Tok::NonLogicalNewline) | None
                 ) {
                     continuation_lines.push(line_start);
                 }
2 changes: 1 addition & 1 deletion crates/ruff_python_formatter/src/lib.rs
@@ -28,7 +28,7 @@ pub fn fmt(contents: &str) -> Result<Formatted<ASTFormatContext>> {
     let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);

     // Extract trivia.
-    let trivia = trivia::extract_trivia_tokens(&tokens, contents);
+    let trivia = trivia::extract_trivia_tokens(&tokens);

     // Parse the AST.
     let python_ast = ruff_rustpython::parse_program_tokens(tokens, "<filename>")?;

This file was deleted.

45 changes: 9 additions & 36 deletions crates/ruff_python_formatter/src/trivia.rs
@@ -2,7 +2,6 @@ use ruff_text_size::{TextRange, TextSize};
 use rustc_hash::FxHashMap;
 use rustpython_parser::lexer::LexResult;
 use rustpython_parser::Tok;
-use std::ops::Add;

 use crate::cst::{
     Alias, Arg, Body, BoolOp, CmpOp, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword,
@@ -190,49 +189,25 @@
     }
 }

-pub fn extract_trivia_tokens(lxr: &[LexResult], text: &str) -> Vec<TriviaToken> {
+pub fn extract_trivia_tokens(lxr: &[LexResult]) -> Vec<TriviaToken> {
     let mut tokens = vec![];
-    let mut prev_end = TextSize::default();
     let mut prev_tok: Option<(&Tok, TextRange)> = None;
     let mut prev_semantic_tok: Option<(&Tok, TextRange)> = None;
     let mut parens = vec![];

     for (tok, range) in lxr.iter().flatten() {
-        // Add empty lines.
-        let trivia = &text[TextRange::new(prev_end, range.start())];
-        let bytes = trivia.as_bytes();
-
-        let mut bytes_iter = bytes.iter().enumerate();
-
-        let mut after_new_line =
-            matches!(prev_tok, Some((Tok::Newline | Tok::NonLogicalNewline, _)));
-
-        while let Some((index, byte)) = bytes_iter.next() {
-            let len = match byte {
-                b'\r' if bytes.get(index + 1) == Some(&b'\n') => {
-                    bytes_iter.next();
-                    TextSize::from(2)
-                }
-                b'\n' | b'\r' => TextSize::from(1),
-                _ => {
-                    // Must be whitespace or the parser would generate a token
-                    continue;
-                }
-            };
+        let after_new_line = matches!(prev_tok, Some((Tok::Newline | Tok::NonLogicalNewline, _)));

-            if after_new_line {
-                let new_line_start = prev_end.add(TextSize::try_from(index).unwrap());
-                tokens.push(TriviaToken {
-                    range: TextRange::new(new_line_start, new_line_start.add(len)),
-                    kind: TriviaTokenKind::EmptyLine,
-                });
-            } else {
-                after_new_line = true;
-            }
+        // Add empty lines.
+        if after_new_line && matches!(tok, Tok::NonLogicalNewline) {
+            tokens.push(TriviaToken {
+                range: *range,
+                kind: TriviaTokenKind::EmptyLine,
+            });
         }

         // Add comments.
-        if let Tok::Comment(_) = tok {
+        if matches!(tok, Tok::Comment(..)) {
             tokens.push(TriviaToken {
                 range: *range,
                 // Used to use prev_non-newline_tok
@@ -293,8 +268,6 @@ pub fn extract_trivia_tokens(lxr: &[LexResult], text: &str) -> Vec<TriviaToken>
         ) {
             prev_semantic_tok = Some((tok, *range));
         }
-
-        prev_end = range.end();
     }
     tokens
 }
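
Taken together with the new `NonLogicalNewline` tokens, the formatter's trivia pass no longer needs the source text or the `prev_end` bookkeeping: an empty line is simply a `NonLogicalNewline` that follows another newline token. A rough usage sketch, written as a test that could sit in this module — it assumes only names already visible in the diffs above (`ruff_rustpython::tokenize` from lib.rs, plus `extract_trivia_tokens` and `TriviaTokenKind` from this file), and the exact ranges it would produce are not guaranteed by the diff alone:

#[test]
fn blank_line_becomes_empty_line_trivia() {
    // One blank line between the two statements.
    let tokens = ruff_rustpython::tokenize("x = 1\n\n# comment\ny = 2\n");
    let trivia = extract_trivia_tokens(&tokens);
    assert!(trivia
        .iter()
        .any(|token| matches!(token.kind, TriviaTokenKind::EmptyLine)));
}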
