Skip to content

Commit

Permalink
wip simple '//' comment token for 'Struct'
Browse files Browse the repository at this point in the history
  • Loading branch information
Vrixyz committed Oct 3, 2024
1 parent 5c5c8b1 commit 2da85d3
Show file tree
Hide file tree
Showing 4 changed files with 55 additions and 8 deletions.
1 change: 1 addition & 0 deletions naga/src/front/wgsl/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,7 @@ impl<'a> Error<'a> {
Token::Arrow => "->".to_string(),
Token::Unknown(c) => format!("unknown ('{c}')"),
Token::Trivia => "trivia".to_string(),
Token::Comment(s) => format!("documentation ('{s}')"),
Token::End => "end".to_string(),
}
}
Expand Down
2 changes: 2 additions & 0 deletions naga/src/front/wgsl/parse/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,8 @@ pub struct StructMember<'a> {
/// A `struct` declaration parsed from the WGSL source.
pub struct Struct<'a> {
    pub name: Ident<'a>,
    pub members: Vec<StructMember<'a>>,
    // TODO: Make it optional ? Store Span ? Add it to other elements
    // Raw text of `//` comments collected by the parser for this struct;
    // each slice borrows directly from the original source string.
    // NOTE(review): presumably these are the comments immediately preceding
    // the `struct` keyword — confirm against the parser's aggregation logic.
    pub comments: Vec<&'a str>,
}

#[derive(Debug)]
Expand Down
36 changes: 31 additions & 5 deletions naga/src/front/wgsl/parse/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ pub enum Token<'a> {
Arrow,
Unknown(char),
Trivia,
Comment(&'a str),
End,
}

Expand Down Expand Up @@ -81,8 +82,13 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
let og_chars = chars.as_str();
match chars.next() {
Some('/') => {
let _ = chars.position(is_comment_end);
(Token::Trivia, chars.as_str())
let og_chars = chars.as_str();
let documentation = if let Some(end_position) = chars.position(is_comment_end) {
&og_chars[..end_position]
} else {
og_chars
};
(Token::Comment(documentation), chars.as_str())
}
Some('*') => {
let mut depth = 1;
Expand Down Expand Up @@ -238,7 +244,7 @@ impl<'a> Lexer<'a> {
loop {
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false);
if let Token::Trivia = token {
if let Token::Trivia | Token::Comment(_) = token {
self.input = rest;
} else {
return self.current_byte_offset();
Expand All @@ -253,7 +259,27 @@ impl<'a> Lexer<'a> {
(token, rest)
}

const fn current_byte_offset(&self) -> usize {
pub(in crate::front::wgsl) fn start_byte_offset_and_aggregate_comment(
&'a mut self,
comments: &mut Vec<Span>,
) -> usize {
loop {
let start = self.current_byte_offset();
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false);
if let Token::Comment(_) = token {
let next = self.current_byte_offset();
comments.push(Span::new(start as u32, next as u32));
self.input = rest;
} else if let Token::Trivia = token {
self.input = rest;
} else {
return self.current_byte_offset();
}
}
}

pub const fn current_byte_offset(&self) -> usize {
self.source.len() - self.input.len()
}

Expand Down Expand Up @@ -288,7 +314,7 @@ impl<'a> Lexer<'a> {
let (token, rest) = consume_token(self.input, generic);
self.input = rest;
match token {
Token::Trivia => start_byte_offset = self.current_byte_offset(),
Token::Trivia | Token::Comment(_) => start_byte_offset = self.current_byte_offset(),
_ => {
self.last_end_offset = self.current_byte_offset();
return (token, self.span_from(start_byte_offset));
Expand Down
24 changes: 21 additions & 3 deletions naga/src/front/wgsl/parse/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::ops::Index;

use crate::front::wgsl::error::{Error, ExpectedToken};
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
Expand Down Expand Up @@ -2172,6 +2174,9 @@ impl Parser {
lexer: &mut Lexer<'a>,
out: &mut ast::TranslationUnit<'a>,
) -> Result<(), Error<'a>> {
// Save a lexer to be able to backtrack comments if need be.
let mut lexer_comments = lexer.clone();

// read attributes
let mut binding = None;
let mut stage = ParsedAttribute::default();
Expand Down Expand Up @@ -2251,7 +2256,6 @@ impl Parser {
(_, word_span) => return Err(Error::UnknownAttribute(word_span)),
}
}

let attrib_span = self.pop_rule_span(lexer);
match (bind_group.value, bind_index.value) {
(Some(group), Some(index)) => {
Expand All @@ -2267,13 +2271,27 @@ impl Parser {

// read item
let start = lexer.start_byte_offset();
let kind = match lexer.next() {
let token_span = lexer.next();

let kind = match token_span {
(Token::Separator(';'), _) => None,
(Token::Word("struct"), _) => {
let name = lexer.next_ident()?;

let members = self.struct_body(lexer, &mut ctx)?;
Some(ast::GlobalDeclKind::Struct(ast::Struct { name, members }))

let mut comments = Vec::new();
lexer_comments.start_byte_offset_and_aggregate_comment(&mut comments);

let comments = comments
.into_iter()
.map(|comment_span| lexer.source.index(comment_span))
.collect();
Some(ast::GlobalDeclKind::Struct(ast::Struct {
name,
members,
comments,
}))
}
(Token::Word("alias"), _) => {
let name = lexer.next_ident()?;
Expand Down

0 comments on commit 2da85d3

Please sign in to comment.