add an option struct to opt in comments parsing for wgsl
Vrixyz committed Feb 21, 2025
1 parent 684e7e4 commit 57865be
Showing 4 changed files with 99 additions and 35 deletions.
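For orientation, here is a minimal usage sketch of the new API (an editor's sketch, not part of the diff): `ParserOptions`, its public `parse_comments` field, and `Frontend::new_with_options` are introduced by this commit, while `Frontend::parse` is assumed to be the pre-existing public entry point.

    use naga::front::wgsl::{Frontend, ParserOptions};

    // `ParserOptions::new()` defaults to `parse_comments: false`, in which case
    // comments are lexed as `Token::Trivia` and dropped. Opt in explicitly:
    let options = ParserOptions {
        parse_comments: true,
    };
    let mut frontend = Frontend::new_with_options(options);

    // Assumed pre-existing entry point; this commit only threads the options
    // through to `Parser::parse` and the lexer.
    let module = frontend
        .parse("//! Module comment\nfn main() {}")
        .expect("valid WGSL");

With the default options, behavior should match the parser before this change.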
3 changes: 0 additions & 3 deletions naga/src/front/wgsl/lower/mod.rs
@@ -3168,9 +3168,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
);
for (i, c) in comments.drain(..).enumerate() {
if let Some(comment) = c {
if ctx.module.comments.is_none() {
ctx.module.comments = Some(Default::default());
}
let comments = ctx.module.get_comments_or_insert_default();
comments.struct_members.insert((handle, i), comment);
}
11 changes: 10 additions & 1 deletion naga/src/front/wgsl/mod.rs
@@ -19,19 +19,28 @@ use thiserror::Error;
pub use crate::front::wgsl::error::ParseError;
use crate::front::wgsl::lower::Lowerer;
use crate::Scalar;
pub use parse::ParserOptions;

pub use crate::front::wgsl::parse::directive::language_extension::{
ImplementedLanguageExtension, LanguageExtension, UnimplementedLanguageExtension,
};

pub struct Frontend {
parser: Parser,
options: ParserOptions,
}

impl Frontend {
pub const fn new() -> Self {
Self {
parser: Parser::new(),
options: ParserOptions::new(),
}
}
pub const fn new_with_options(options: ParserOptions) -> Self {
Self {
parser: Parser::new(),
options,
}
}

@@ -40,7 +49,7 @@ impl Frontend {
}

fn inner<'a>(&mut self, source: &'a str) -> Result<crate::Module, Error<'a>> {
let tu = self.parser.parse(source)?;
let tu = self.parser.parse(source, &self.options)?;
let index = index::Index::generate(&tu)?;
let module = Lowerer::new(&index).lower(&tu)?;

63 changes: 50 additions & 13 deletions naga/src/front/wgsl/parse/lexer.rs
@@ -47,7 +47,7 @@ fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str) {
/// `Token::LogicalOperation` tokens.
///
/// [§3.1 Parsing]: https://gpuweb.github.io/gpuweb/wgsl/#parsing
fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
fn consume_token(input: &str, generic: bool, save_comments: bool) -> (Token<'_>, &str) {
let mut chars = input.chars();
let cur = match chars.next() {
Some(c) => c,
Expand Down Expand Up @@ -87,6 +87,9 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
.char_indices()
.find(|char_indices| is_comment_end(char_indices.1))
{
if !save_comments {
return (Token::Trivia, &input[end_position.0..]);
}
let end_position = end_position.0;
return (
if chars.next() == Some('!') {
@@ -97,6 +100,9 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
&input[end_position..],
);
}
if !save_comments {
return (Token::Trivia, "");
}
(Token::Comment(input), "")
}
Some('*') => {
@@ -112,6 +118,9 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
prev = None;
depth -= 1;
if depth == 0 {
if !save_comments {
return (Token::Trivia, &input[(index + 1)..]);
}
let doc = &input[..=index];
return (Token::Comment(doc), &input[(index + 1)..]);
}
@@ -237,17 +246,22 @@ pub(in crate::front::wgsl) struct Lexer<'a> {
/// statements.
last_end_offset: usize,

/// Whether or not to save comments as we lex through them.
/// If `false`, comments are lexed as [`Token::Trivia`].
save_comments: bool,

#[allow(dead_code)]
pub(in crate::front::wgsl) enable_extensions: EnableExtensions,
}

impl<'a> Lexer<'a> {
pub(in crate::front::wgsl) const fn new(input: &'a str) -> Self {
pub(in crate::front::wgsl) const fn new(input: &'a str, save_comments: bool) -> Self {
Lexer {
input,
source: input,
last_end_offset: 0,
enable_extensions: EnableExtensions::empty(),
save_comments,
}
}

@@ -273,7 +287,7 @@ impl<'a> Lexer<'a> {
pub(in crate::front::wgsl) fn start_byte_offset(&mut self) -> usize {
loop {
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false);
let (token, rest) = consume_token(self.input, false, self.save_comments);
if let Token::Trivia | Token::Comment(_) | Token::CommentModule(_) = token {
self.input = rest;
} else {
@@ -296,7 +310,7 @@ impl<'a> Lexer<'a> {
loop {
let start = self.current_byte_offset();
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false);
let (token, rest) = consume_token(self.input, false, self.save_comments);
if let Token::Comment(_) = token {
self.input = rest;
let next = self.current_byte_offset();
@@ -358,7 +372,7 @@ impl<'a> Lexer<'a> {
pub fn next_until(&mut self, stop_at: fn(Token) -> bool, generic: bool) -> TokenSpan<'a> {
let mut start_byte_offset = self.current_byte_offset();
loop {
let (token, rest) = consume_token(self.input, generic);
let (token, rest) = consume_token(self.input, generic, self.save_comments);
self.input = rest;
if stop_at(token) {
self.last_end_offset = self.current_byte_offset();
@@ -538,7 +552,29 @@ impl<'a> Lexer<'a> {
#[cfg(test)]
#[track_caller]
fn sub_test(source: &str, expected_tokens: &[Token]) {
let mut lex = Lexer::new(source);
sub_test_with_comments(false, source, expected_tokens);
}

#[cfg(test)]
#[track_caller]
fn sub_test_with_and_without_comments(source: &str, expected_tokens: &[Token]) {
sub_test_with_comments(true, source, expected_tokens);
sub_test_with_comments(
false,
source,
expected_tokens
.iter()
.filter(|v| !matches!(v, Token::Comment(_) | Token::CommentModule(_)))
.cloned()
.collect::<Vec<_>>()
.as_slice(),
);
}

#[cfg(test)]
#[track_caller]
fn sub_test_with_comments(with_comments: bool, source: &str, expected_tokens: &[Token]) {
let mut lex = Lexer::new(source, with_comments);
for &token in expected_tokens {
assert_eq!(
lex.next_until(|token| !matches!(token, Token::Trivia), false)
@@ -760,7 +796,8 @@ fn test_tokens() {
sub_test("No¾", &[Token::Word("No"), Token::Unknown('¾')]);
sub_test("No好", &[Token::Word("No好")]);
sub_test("_No", &[Token::Word("_No")]);
sub_test(

sub_test_with_and_without_comments(
"*/*/***/*//=/*****//",
&[
Token::Operation('*'),
@@ -835,8 +872,8 @@ fn test_variable_decl() {

#[test]
fn test_comments() {
sub_test("// Single comment", &[Token::Comment("// Single comment")]);
sub_test(
sub_test_with_and_without_comments("// Single comment", &[Token::Comment("// Single comment")]);
sub_test_with_and_without_comments(
"/* multi
line
comment */",
@@ -846,7 +883,7 @@ fn test_comments() {
comment */",
)],
);
sub_test(
sub_test_with_and_without_comments(
"/* multi
line
comment */
@@ -864,7 +901,7 @@

#[test]
fn test_comment_nested() {
sub_test(
sub_test_with_and_without_comments(
"/*
a comment with nested one /*
nested comment
@@ -892,7 +929,7 @@ fn test_comment_nested() {

#[test]
fn test_comment_long_character() {
sub_test(
sub_test_with_and_without_comments(
"// π/2
// D(𝐡) = ───────────────────────────────────────────────────
// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`
@@ -914,7 +951,7 @@

#[test]
fn test_module_comments() {
sub_test(
sub_test_with_and_without_comments(
"//! Comment Module
//! Another one.
// Trying to break module comment
57 changes: 39 additions & 18 deletions naga/src/front/wgsl/parse/mod.rs
@@ -261,6 +261,21 @@ impl<'a> BindingParser<'a> {
}
}

/// Configuration for the whole parser run.
pub struct ParserOptions {
/// Controls whether the parser should parse comments.
pub parse_comments: bool,
}

impl ParserOptions {
/// Creates a new `ParserOptions` with default settings.
pub const fn new() -> Self {
ParserOptions {
parse_comments: false,
}
}
}
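
// A hedged aside (editor's sketch, not part of this diff): because
// `parse_comments` is a public field, the options can be built either with the
// const constructor or with a plain struct literal:
//
//     let defaults = ParserOptions::new();                  // parse_comments: false
//     let opted_in = ParserOptions { parse_comments: true }; // opt in to comments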

pub struct Parser {
rules: Vec<(Rule, usize)>,
recursion_depth: u32,
@@ -2824,34 +2839,40 @@ impl Parser {
}
}

pub fn parse<'a>(&mut self, source: &'a str) -> Result<ast::TranslationUnit<'a>, Error<'a>> {
pub fn parse<'a>(
&mut self,
source: &'a str,
options: &ParserOptions,
) -> Result<ast::TranslationUnit<'a>, Error<'a>> {
self.reset();

let mut lexer = Lexer::new(source);
let mut lexer = Lexer::new(source, options.parse_comments);
let mut tu = ast::TranslationUnit::default();
let mut enable_extensions = EnableExtensions::empty();
let mut diagnostic_filters = DiagnosticFilterMap::new();

// Parse module comments.
let mut comments = Vec::new();
if options.parse_comments {
// Parse module comments.
let mut comments = Vec::new();

fn peek_any_next<'a>(lexer: &'a Lexer) -> (Token<'a>, Span) {
let mut cloned = lexer.clone();
let token = cloned.next_until(|_| true, false);
token
}
loop {
match peek_any_next(&lexer) {
(Token::CommentModule(_), span) => {
comments.push(lexer.source.index(span));
let _ = lexer.next_until(|_| true, false);
}
_ => {
break;
fn peek_any_next<'a>(lexer: &'a Lexer) -> (Token<'a>, Span) {
let mut cloned = lexer.clone();
let token = cloned.next_until(|_| true, false);
token
}
loop {
match peek_any_next(&lexer) {
(Token::CommentModule(_), span) => {
comments.push(lexer.source.index(span));
let _ = lexer.next_until(|_| true, false);
}
_ => {
break;
}
}
}
tu.comments = comments;
}
tu.comments = comments;

// Parse directives.
while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
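As a closing sketch (assumptions flagged in the comments), this is where the opted-in comments appear to land, going only by the fields visible in this diff: module-level `//!` comments are collected into `tu.comments` during `Parser::parse`, and struct-member comments are attached in the lowerer via `get_comments_or_insert_default()`.

    // Hypothetical read-back. Assumes `module.comments` is an `Option` (per the
    // removed `is_none()` check in lower/mod.rs) and that `struct_members` is a
    // map keyed by `(struct_handle, member_index)` (per the
    // `insert((handle, i), comment)` call above).
    if let Some(comments) = &module.comments {
        for ((ty_handle, member_index), text) in &comments.struct_members {
            println!("{ty_handle:?}.{member_index}: {text}");
        }
    }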
