diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs index febcd9a4e0..cc50298bb2 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -299,6 +299,7 @@ impl<'a> Error<'a> { Token::Arrow => "->".to_string(), Token::Unknown(c) => format!("unknown ('{c}')"), Token::Trivia => "trivia".to_string(), + Token::Comment(s) => format!("documentation ('{s}')"), Token::End => "end".to_string(), } } diff --git a/naga/src/front/wgsl/parse/ast.rs b/naga/src/front/wgsl/parse/ast.rs index ea8013ee7c..95fa99c029 100644 --- a/naga/src/front/wgsl/parse/ast.rs +++ b/naga/src/front/wgsl/parse/ast.rs @@ -186,6 +186,8 @@ pub struct StructMember<'a> { pub struct Struct<'a> { pub name: Ident<'a>, pub members: Vec<StructMember<'a>>, + // TODO: Make it optional ? Store Span ? Add it to other elements + pub comments: Vec<&'a str>, } #[derive(Debug)] diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index d03a448561..243de6080f 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -22,6 +22,7 @@ pub enum Token<'a> { Arrow, Unknown(char), Trivia, + Comment(&'a str), End, } @@ -81,8 +82,13 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) { let og_chars = chars.as_str(); match chars.next() { Some('/') => { - let _ = chars.position(is_comment_end); - (Token::Trivia, chars.as_str()) + let og_chars = chars.as_str(); + let documentation = if let Some(end_position) = chars.position(is_comment_end) { + &og_chars[..end_position] + } else { + og_chars + }; + (Token::Comment(documentation), chars.as_str()) } Some('*') => { let mut depth = 1; @@ -238,7 +244,7 @@ impl<'a> Lexer<'a> { loop { // Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false); - if let Token::Trivia = token { + if let Token::Trivia | Token::Comment(_) = token { self.input = rest; } else { return self.current_byte_offset(); @@ -253,7 +259,27 @@ impl<'a> Lexer<'a> { (token, rest) } - const fn current_byte_offset(&self) -> usize { + pub(in crate::front::wgsl) fn start_byte_offset_and_aggregate_comment( + &'a mut self, + comments: &mut Vec<Span>, + ) -> usize { + loop { + let start = self.current_byte_offset(); + // Eat all trivia because `next` doesn't eat trailing trivia. + let (token, rest) = consume_token(self.input, false); + if let Token::Comment(_) = token { + let next = self.current_byte_offset(); + comments.push(Span::new(start as u32, next as u32)); + self.input = rest; + } else if let Token::Trivia = token { + self.input = rest; + } else { + return self.current_byte_offset(); + } + } + } + + pub const fn current_byte_offset(&self) -> usize { self.source.len() - self.input.len() } @@ -288,7 +314,7 @@ impl<'a> Lexer<'a> { let (token, rest) = consume_token(self.input, generic); self.input = rest; match token { - Token::Trivia => start_byte_offset = self.current_byte_offset(), + Token::Trivia | Token::Comment(_) => start_byte_offset = self.current_byte_offset(), _ => { self.last_end_offset = self.current_byte_offset(); return (token, self.span_from(start_byte_offset)); diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index ee3a1846b9..fa5e3fe1af 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -1,3 +1,5 @@ +use std::ops::Index; + use crate::front::wgsl::error::{Error, ExpectedToken}; use crate::front::wgsl::parse::lexer::{Lexer, Token}; use crate::front::wgsl::parse::number::Number; @@ -2172,6 +2174,9 @@ impl Parser { lexer: &mut Lexer<'a>, out: &mut ast::TranslationUnit<'a>, ) -> Result<(), Error<'a>> { + // Save a lexer to be able to backtrack comments if need be.
+ let mut lexer_comments = lexer.clone(); + // read attributes let mut binding = None; let mut stage = ParsedAttribute::default(); @@ -2251,7 +2256,6 @@ impl Parser { (_, word_span) => return Err(Error::UnknownAttribute(word_span)), } } - let attrib_span = self.pop_rule_span(lexer); match (bind_group.value, bind_index.value) { (Some(group), Some(index)) => { @@ -2267,13 +2271,27 @@ impl Parser { // read item let start = lexer.start_byte_offset(); - let kind = match lexer.next() { + let token_span = lexer.next(); + + let kind = match token_span { (Token::Separator(';'), _) => None, (Token::Word("struct"), _) => { let name = lexer.next_ident()?; let members = self.struct_body(lexer, &mut ctx)?; - Some(ast::GlobalDeclKind::Struct(ast::Struct { name, members })) + + let mut comments = Vec::new(); + lexer_comments.start_byte_offset_and_aggregate_comment(&mut comments); + + let comments = comments + .into_iter() + .map(|comment_span| lexer.source.index(comment_span)) + .collect(); + Some(ast::GlobalDeclKind::Struct(ast::Struct { + name, + members, + comments, + })) } (Token::Word("alias"), _) => { let name = lexer.next_ident()?;