Skip to content

Commit

Permalink
Use Rust-style doc comments; ignore others
Browse files Browse the repository at this point in the history
  • Loading branch information
Vrixyz committed Feb 25, 2025
1 parent 46f81af commit 386ed0d
Show file tree
Hide file tree
Showing 3 changed files with 106 additions and 61 deletions.
4 changes: 2 additions & 2 deletions naga/src/front/wgsl/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -377,8 +377,8 @@ impl<'a> Error<'a> {
Token::Arrow => "->".to_string(),
Token::Unknown(c) => format!("unknown (`{c}`)"),
Token::Trivia => "trivia".to_string(),
Token::Comment(s) => format!("documentation ('{s}')"),
Token::CommentModule(s) => format!("module documentation ('{s}')"),
Token::CommentDoc(s) => format!("documentation ('{s}')"),
Token::CommentDocModule(s) => format!("module documentation ('{s}')"),
Token::End => "end".to_string(),
},
ExpectedToken::Identifier => "identifier".to_string(),
Expand Down
161 changes: 103 additions & 58 deletions naga/src/front/wgsl/parse/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ pub enum Token<'a> {
Arrow,
Unknown(char),
Trivia,
Comment(&'a str),
CommentModule(&'a str),
CommentDoc(&'a str),
CommentDocModule(&'a str),
End,
}

Expand Down Expand Up @@ -83,46 +83,64 @@ fn consume_token(input: &str, generic: bool, save_comments: bool) -> (Token<'_>,
let og_chars = chars.as_str();
match chars.next() {
Some('/') => {
if let Some(end_position) = input
.char_indices()
.find(|char_indices| is_comment_end(char_indices.1))
{
if !save_comments {
return (Token::Trivia, &input[end_position.0..]);
let end_position = {
if let Some(end_position) = input
.char_indices()
.find(|char_indices| is_comment_end(char_indices.1))
{
end_position.0
} else {
input.len()
}
let end_position = end_position.0;
return (
if chars.next() == Some('!') {
Token::CommentModule(&input[..end_position])
} else {
Token::Comment(&input[..end_position])
},
&input[end_position..],
);
}
};
if !save_comments {
return (Token::Trivia, "");
return (Token::Trivia, &input[end_position..]);
}
(Token::Comment(input), "")
let next_char = chars.next();
(
match next_char {
Some('/') => Token::CommentDoc(&input[..end_position]),
Some('!') => Token::CommentDocModule(&input[..end_position]),
_ => Token::Trivia,
},
&input[end_position..],
)
}
Some('*') => {
let mut depth = 1;
let mut prev = None;
let mut char_indices = input.char_indices();

// Skip '/' and '*'
char_indices.next();
char_indices.next();

let mut constructing_token = if !save_comments {
Token::Trivia
} else {
let mut peeker = char_indices.clone().peekable();
let peeked_next_char = peeker.peek();
let peeked_next_char =
peeked_next_char.map(|peeked_next_char| peeked_next_char.1);
match peeked_next_char {
Some('*') => Token::CommentDoc(""),
Some('!') => Token::CommentDocModule(""),
_ => Token::Trivia,
}
};
for (index, c) in char_indices {
match (prev, c) {
(Some('*'), '/') => {
prev = None;
depth -= 1;
if depth == 0 {
if !save_comments {
return (Token::Trivia, &input[(index + 1)..]);
if let Token::CommentDoc(ref mut doc)
| Token::CommentDocModule(ref mut doc) = constructing_token
{
*doc = &input[..=index];
}
let doc = &input[..=index];
return (Token::Comment(doc), &input[(index + 1)..]);

return (constructing_token, &input[(index + 1)..]);
}
}
(Some('/'), '*') => {
Expand Down Expand Up @@ -288,7 +306,7 @@ impl<'a> Lexer<'a> {
loop {
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false, self.save_comments);
if let Token::Trivia | Token::Comment(_) | Token::CommentModule(_) = token {
if let Token::Trivia | Token::CommentDoc(_) | Token::CommentDocModule(_) = token {
self.input = rest;
} else {
return self.current_byte_offset();
Expand All @@ -311,13 +329,13 @@ impl<'a> Lexer<'a> {
let start = self.current_byte_offset();
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false, self.save_comments);
if let Token::Comment(_) = token {
if let Token::CommentDoc(_) = token {
self.input = rest;
let next = self.current_byte_offset();
comments.push(Span::new(start as u32, next as u32));
} else if let Token::Trivia = token {
self.input = rest;
} else if let Token::CommentModule(_) = token {
} else if let Token::CommentDocModule(_) = token {
self.input = rest;
} else {
return self.current_byte_offset();
Expand Down Expand Up @@ -359,7 +377,7 @@ impl<'a> Lexer<'a> {
|token| {
!matches!(
token,
Token::Trivia | Token::Comment(_) | Token::CommentModule(_)
Token::Trivia | Token::CommentDoc(_) | Token::CommentDocModule(_)
)
},
generic,
Expand Down Expand Up @@ -564,7 +582,7 @@ fn sub_test_with_and_without_comments(source: &str, expected_tokens: &[Token]) {
source,
expected_tokens
.iter()
.filter(|v| !matches!(v, Token::Comment(_) | Token::CommentModule(_)))
.filter(|v| !matches!(**v, Token::CommentDoc(_) | Token::CommentDocModule(_)))
.cloned()
.collect::<Vec<_>>()
.as_slice(),
Expand Down Expand Up @@ -801,9 +819,8 @@ fn test_tokens() {
"*/*/***/*//=/*****//",
&[
Token::Operation('*'),
Token::Comment("/*/***/*/"),
Token::AssignmentOperation('/'),
Token::Comment("/*****/"),
Token::CommentDoc("/*****/"),
Token::Operation('/'),
],
);
Expand Down Expand Up @@ -871,47 +888,70 @@ fn test_variable_decl() {
}

#[test]
fn test_comments_trivia() {
    // Non-doc comments (`//` and `/* */`) are trivia: they must produce no
    // tokens regardless of whether comment saving is enabled.
    sub_test_with_and_without_comments("// Single comment", &[]);

    // A multi-line block comment is also trivia.
    sub_test_with_and_without_comments(
        "/* multi
    line
    comment */",
        &[],
    );
    // A block comment followed by a line comment: still no tokens.
    sub_test_with_and_without_comments(
        "/* multi
    line
    comment */
    // and another",
        &[],
    );
}

#[test]
fn test_comments() {
    // Rust-style doc comments (`///` and `/** */`) are lexed as
    // `Token::CommentDoc`, carrying the full comment text including the
    // comment delimiters.
    sub_test_with_and_without_comments(
        "/// Single comment",
        &[Token::CommentDoc("/// Single comment")],
    );

    // A multi-line `/** */` doc comment is one token spanning all lines.
    sub_test_with_and_without_comments(
        "/** multi
    line
    comment */",
        &[Token::CommentDoc(
            "/** multi
    line
    comment */",
        )],
    );
    // Adjacent doc comments lex as separate `CommentDoc` tokens.
    sub_test_with_and_without_comments(
        "/** multi
    line
    comment */
    /// and another",
        &[
            Token::CommentDoc(
                "/** multi
    line
    comment */",
            ),
            Token::CommentDoc("/// and another"),
        ],
    );
}

#[test]
fn test_comment_nested() {
sub_test_with_and_without_comments(
"/*
a comment with nested one /*
"/**
a comment with nested one /**
nested comment
*/
*/
const a : i32 = 2;",
&[
Token::Comment(
"/*
a comment with nested one /*
Token::CommentDoc(
"/**
a comment with nested one /**
nested comment
*/
*/",
Expand All @@ -930,14 +970,14 @@ fn test_comment_nested() {
#[test]
fn test_comment_long_character() {
sub_test_with_and_without_comments(
"// π/2
// D(𝐡) = ───────────────────────────────────────────────────
// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`
"/// π/2
/// D(𝐡) = ───────────────────────────────────────────────────
/// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`
const a : i32 = 2;",
&[
Token::Comment("// π/2"),
Token::Comment("// D(𝐡) = ───────────────────────────────────────────────────"),
Token::Comment("// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`"),
Token::CommentDoc("/// π/2"),
Token::CommentDoc("/// D(𝐡) = ───────────────────────────────────────────────────"),
Token::CommentDoc("/// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`"),
Token::Word("const"),
Token::Word("a"),
Token::Separator(':'),
Expand All @@ -950,22 +990,27 @@ fn test_comment_long_character() {
}

#[test]
fn test_module_comments() {
fn test_comments_module() {
sub_test_with_and_without_comments(
"//! Comment Module
//! Another one.
// Trying to break module comment
/*! Different module comment */
/// Trying to break module comment
// Trying to break module comment again
//! After a regular comment is ok.
/*! Different module comment again */
//! After a break is supported.
const
//! After anything else is not.",
&[
Token::CommentModule("//! Comment Module"),
Token::CommentModule("//! Another one."),
Token::Comment("// Trying to break module comment"),
Token::CommentModule("//! After a regular comment is ok."),
Token::CommentModule("//! After a break is supported."),
Token::CommentDocModule("//! Comment Module"),
Token::CommentDocModule("//! Another one."),
Token::CommentDocModule("/*! Different module comment */"),
Token::CommentDoc("/// Trying to break module comment"),
Token::CommentDocModule("//! After a regular comment is ok."),
Token::CommentDocModule("/*! Different module comment again */"),
Token::CommentDocModule("//! After a break is supported."),
Token::Word("const"),
],
);
Expand Down
2 changes: 1 addition & 1 deletion naga/src/front/wgsl/parse/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2860,7 +2860,7 @@ impl Parser {
let token = cloned.next_until(|_| true, false);
token
}
while let (Token::CommentModule(_), span) = peek_any_next(&lexer) {
while let (Token::CommentDocModule(_), span) = peek_any_next(&lexer) {
comments.push(lexer.source.index(span));
let _ = lexer.next_until(|_| true, false);
}
Expand Down

0 comments on commit 386ed0d

Please sign in to comment.