diff --git a/crates/lexer-generator/resources/lex_template.rs b/crates/lexer-generator/resources/lex_template.rs
index 6c2229a..0f21cbb 100644
--- a/crates/lexer-generator/resources/lex_template.rs
+++ b/crates/lexer-generator/resources/lex_template.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::all)]
 #![allow(unreachable_code)]
 
 use std::collections::HashMap;
diff --git a/crates/postgresql-lst-parser/src/lexer.rs b/crates/postgresql-lst-parser/src/lexer.rs
index c5c9edd..0eccdd3 100644
--- a/crates/postgresql-lst-parser/src/lexer.rs
+++ b/crates/postgresql-lst-parser/src/lexer.rs
@@ -48,6 +48,7 @@ pub struct Rule {
     pub kind: RuleKind,
 }
 
+#[allow(clippy::all)]
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum TokenKind {
     RAW(String),
@@ -177,5 +178,5 @@ pub fn lex(input: &str) -> Vec<Token> {
     }
 
     // dbg!(&tokens);
-    return tokens;
+    tokens
 }
diff --git a/crates/postgresql-lst-parser/src/lexer/generated.rs b/crates/postgresql-lst-parser/src/lexer/generated.rs
index 21d09bf..ac73179 100644
--- a/crates/postgresql-lst-parser/src/lexer/generated.rs
+++ b/crates/postgresql-lst-parser/src/lexer/generated.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::all)]
 #![allow(unreachable_code)]
 
 use std::collections::HashMap;
diff --git a/crates/postgresql-lst-parser/src/lexer/util.rs b/crates/postgresql-lst-parser/src/lexer/util.rs
index c384f27..d63f9e2 100644
--- a/crates/postgresql-lst-parser/src/lexer/util.rs
+++ b/crates/postgresql-lst-parser/src/lexer/util.rs
@@ -13,7 +13,7 @@ pub fn yyerror(msg: &str) {
 }
 
 pub fn get_char_by_byte_pos(s: &str, byte_pos: usize) -> char {
-    s.bytes().nth(byte_pos).unwrap() as char
+    s.as_bytes()[byte_pos] as char
 }
 
 impl Lexer {
@@ -112,11 +112,11 @@ impl Lexer {
 
         let mut neg = false;
         match yytext.bytes().next() {
-            Some(b) if b == '-' as u8 => {
+            Some(b'-') => {
                 neg = true;
                 yytext = &yytext[1..];
             }
-            Some(b) if b == '+' as u8 => {
+            Some(b'+') => {
                 yytext = &yytext[1..];
             }
             _ => (),
@@ -124,7 +124,7 @@ impl Lexer {
 
         let res_parse_as_i32 = match radix {
             8 => i32::from_str_radix(&yytext[2..], 8),
-            10 => i32::from_str_radix(yytext, 10),
+            10 => yytext.parse::<i32>(),
             16 => i32::from_str_radix(&yytext[2..], 16),
             _ => unreachable!(),
         };
diff --git a/crates/postgresql-lst-parser/src/lst.rs b/crates/postgresql-lst-parser/src/lst.rs
index 22bc29a..bab7fc3 100644
--- a/crates/postgresql-lst-parser/src/lst.rs
+++ b/crates/postgresql-lst-parser/src/lst.rs
@@ -106,10 +106,10 @@ impl Parser {
 
 /// The logic for converting tokens in PostgreSQL's parser.c
 /// ref: https://github.com/postgres/postgres/blob/REL_16_STABLE/src/backend/parser/parser.c#L195
-fn init_tokens(tokens: &mut Vec<Token>) {
-    fn next_token_index(tokens: &Vec<Token>, i: usize) -> Option<usize> {
-        for j in i + 1..tokens.len() {
-            match tokens[j].kind {
+fn init_tokens(tokens: &mut [Token]) {
+    fn next_token_index(tokens: &[Token], i: usize) -> Option<usize> {
+        for (j, token) in tokens.iter().enumerate().skip(i + 1) {
+            match token.kind {
                 TokenKind::C_COMMENT | TokenKind::SQL_COMMENT => continue,
                 _ => return Some(j),
             }
@@ -234,7 +234,6 @@ pub fn parse(input: &str) -> Result {
                 token.start_byte_pos,
                 &input[last_pos..token.start_byte_pos],
             ));
-            token.start_byte_pos;
         }
 
         last_pos = token.end_byte_pos;
@@ -252,7 +251,7 @@ pub fn parse(input: &str) -> Result {
         }
 
         let action = match action_table[(state * num_terminal_symbol() + cid) as usize] {
-            v if v == 0x7FFF => Action::Error,
+            0x7FFF => Action::Error,
             v if v > 0 => Action::Shift((v - 1) as usize),
             v if v < 0 => Action::Reduce((-v - 1) as usize),
             _ => Action::Accept,
@@ -290,7 +289,7 @@ pub fn parse(input: &str) -> Result {
             }
             children.reverse();
 
-            let reduced_component_id = rule_name_to_component_id(&rule.name);
+            let reduced_component_id = rule_name_to_component_id(rule.name);
 
             let start_byte_pos = children
@@ -359,7 +358,6 @@ pub fn parse(input: &str) -> Result {
                 token.start_byte_pos,
                 &input[last_pos..token.start_byte_pos],
             ));
-            token.start_byte_pos;
        }
 
         last_pos = token.end_byte_pos;