Merge pull request #65 from gwenn/clippy
Clippy
gwenn authored Aug 10, 2024
2 parents 1e0fb76 + 06ef676 commit d98b3f2
Showing 11 changed files with 375 additions and 384 deletions.
2 changes: 1 addition & 1 deletion benches/keyword.rs
@@ -146,7 +146,7 @@ static VALUES: [&[u8]; 136] = [
 #[bench]
 fn bench_keyword_token(b: &mut Bencher) {
     b.iter(|| {
-        for value in VALUES.iter() {
+        for value in &VALUES {
             assert!(keyword_token(value).is_some())
         }
     });
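
A note on the change above: this is the pattern suggested by clippy's explicit_iter_loop lint, borrowing the array (&VALUES) instead of calling .iter() explicitly. A minimal standalone sketch, using an invented array rather than the benchmark's real VALUES table:

// Hypothetical example, not from the repository: iterate over a borrowed
// array instead of an explicit `.iter()` call.
fn main() {
    let values: [&[u8]; 3] = [b"SELECT", b"FROM", b"WHERE"];
    for value in &values {
        // `value` is a `&&[u8]` here, just as in the benchmark loop above.
        assert!(!value.is_empty());
    }
}
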
4 changes: 2 additions & 2 deletions examples/sql_tokens.rs
@@ -32,7 +32,7 @@ fn main() {
             //TK_ID => debug_assert!(),
             //TK_VARIABLE => debug_assert!(),
             TK_BLOB => debug_assert!(
-                token.len() % 2 == 0 && token.iter().all(|b| b.is_ascii_hexdigit())
+                token.len() % 2 == 0 && token.iter().all(u8::is_ascii_hexdigit)
             ),
             TK_INTEGER => {
                 if token.len() > 2
@@ -50,7 +50,7 @@ fn main() {
                     if res.is_err() {
                         eprintln!("Err: {} in {}", res.unwrap_err(), arg);
                     }*/
-                    debug_assert!(token.iter().all(|b| b.is_ascii_digit()))
+                    debug_assert!(token.iter().all(u8::is_ascii_digit))
                 }
             }
             TK_FLOAT => {
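
The two closures replaced in this file are what clippy calls redundant closures: |b| b.is_ascii_hexdigit() can be written as the method path u8::is_ascii_hexdigit, whose signature fn(&u8) -> bool is exactly what Iterator::all expects when iterating over a &[u8]. A short sketch under that assumption, with an invented token value:

// Hypothetical example, not from the repository.
fn main() {
    let token: &[u8] = b"CAFEBABE";
    // Closure form:     token.iter().all(|b| b.is_ascii_hexdigit())
    // Method-path form: token.iter().all(u8::is_ascii_hexdigit)
    assert!(token.len() % 2 == 0 && token.iter().all(u8::is_ascii_hexdigit));
}
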
2 changes: 1 addition & 1 deletion src/dialect/mod.rs
@@ -48,7 +48,7 @@ pub(crate) const MAX_KEYWORD_LEN: usize = 17;
 pub fn keyword_token(word: &[u8]) -> Option<TokenType> {
     KEYWORDS
         .get(UncasedStr::new(unsafe { str::from_utf8_unchecked(word) }))
-        .cloned()
+        .copied()
 }
 
 pub(crate) fn is_identifier(name: &str) -> bool {
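
Switching .cloned() to .copied() is the usual fix for clippy's cloned_instead_of_copied lint: TokenType is presumably a Copy enum, and .copied() makes it explicit that no Clone implementation is being invoked. A rough illustration with a stand-in HashMap and a made-up TokenType; the crate's real KEYWORDS table (keyed by UncasedStr) is not reproduced here:

// Hypothetical example, not from the repository.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenType {
    Select,
}

fn main() {
    let keywords: HashMap<&str, TokenType> = HashMap::from([("SELECT", TokenType::Select)]);
    // `.get` yields Option<&TokenType>; `.copied()` turns it into Option<TokenType>
    // without going through `Clone`.
    let tt = keywords.get("SELECT").copied();
    assert_eq!(tt, Some(TokenType::Select));
}
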
4 changes: 2 additions & 2 deletions src/lexer/scan.rs
@@ -60,8 +60,8 @@ pub struct Scanner<S: Splitter> {
 
 impl<S: Splitter> Scanner<S> {
     /// Constructor
-    pub fn new(splitter: S) -> Scanner<S> {
-        Scanner {
+    pub fn new(splitter: S) -> Self {
+        Self {
             offset: 0,
             mark: (0, 0, 0),
             splitter,
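
This constructor change, like the ones further down, is the use_self pattern: inside an impl block, Self stands in for the full (possibly generic) type name, so the code survives a rename of the type untouched. A minimal sketch with an invented struct:

// Hypothetical example, not from the repository.
struct Scanner<S> {
    offset: usize,
    splitter: S,
}

impl<S> Scanner<S> {
    // `Self` stands for `Scanner<S>` here, including the generic parameter.
    fn new(splitter: S) -> Self {
        Self {
            offset: 0,
            splitter,
        }
    }
}

fn main() {
    let s = Scanner::new(());
    assert_eq!(s.offset, 0);
    let _ = s.splitter;
}
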
54 changes: 27 additions & 27 deletions src/lexer/sql/error.rs
@@ -36,60 +36,60 @@ pub enum Error {
 impl fmt::Display for Error {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
-            Error::Io(ref err) => err.fmt(f),
-            Error::UnrecognizedToken(pos) => write!(f, "unrecognized token at {:?}", pos.unwrap()),
-            Error::UnterminatedLiteral(pos) => {
+            Self::Io(ref err) => err.fmt(f),
+            Self::UnrecognizedToken(pos) => write!(f, "unrecognized token at {:?}", pos.unwrap()),
+            Self::UnterminatedLiteral(pos) => {
                 write!(f, "non-terminated literal at {:?}", pos.unwrap())
             }
-            Error::UnterminatedBracket(pos) => {
+            Self::UnterminatedBracket(pos) => {
                 write!(f, "non-terminated bracket at {:?}", pos.unwrap())
             }
-            Error::UnterminatedBlockComment(pos) => {
+            Self::UnterminatedBlockComment(pos) => {
                 write!(f, "non-terminated block comment at {:?}", pos.unwrap())
             }
-            Error::BadVariableName(pos) => write!(f, "bad variable name at {:?}", pos.unwrap()),
-            Error::BadNumber(pos) => write!(f, "bad number at {:?}", pos.unwrap()),
-            Error::ExpectedEqualsSign(pos) => write!(f, "expected = sign at {:?}", pos.unwrap()),
-            Error::MalformedBlobLiteral(pos) => {
+            Self::BadVariableName(pos) => write!(f, "bad variable name at {:?}", pos.unwrap()),
+            Self::BadNumber(pos) => write!(f, "bad number at {:?}", pos.unwrap()),
+            Self::ExpectedEqualsSign(pos) => write!(f, "expected = sign at {:?}", pos.unwrap()),
+            Self::MalformedBlobLiteral(pos) => {
                 write!(f, "malformed blob literal at {:?}", pos.unwrap())
             }
-            Error::MalformedHexInteger(pos) => {
+            Self::MalformedHexInteger(pos) => {
                 write!(f, "malformed hex integer at {:?}", pos.unwrap())
             }
-            Error::ParserError(ref msg, Some(pos)) => write!(f, "{} at {:?}", msg, pos),
-            Error::ParserError(ref msg, _) => write!(f, "{}", msg),
+            Self::ParserError(ref msg, Some(pos)) => write!(f, "{msg} at {pos:?}"),
+            Self::ParserError(ref msg, _) => write!(f, "{msg}"),
         }
     }
 }
 
 impl error::Error for Error {}
 
 impl From<io::Error> for Error {
-    fn from(err: io::Error) -> Error {
-        Error::Io(err)
+    fn from(err: io::Error) -> Self {
+        Self::Io(err)
     }
 }
 
 impl From<ParserError> for Error {
-    fn from(err: ParserError) -> Error {
-        Error::ParserError(err, None)
+    fn from(err: ParserError) -> Self {
+        Self::ParserError(err, None)
     }
 }
 
 impl ScanError for Error {
     fn position(&mut self, line: u64, column: usize) {
         match *self {
-            Error::Io(_) => {}
-            Error::UnrecognizedToken(ref mut pos) => *pos = Some((line, column)),
-            Error::UnterminatedLiteral(ref mut pos) => *pos = Some((line, column)),
-            Error::UnterminatedBracket(ref mut pos) => *pos = Some((line, column)),
-            Error::UnterminatedBlockComment(ref mut pos) => *pos = Some((line, column)),
-            Error::BadVariableName(ref mut pos) => *pos = Some((line, column)),
-            Error::BadNumber(ref mut pos) => *pos = Some((line, column)),
-            Error::ExpectedEqualsSign(ref mut pos) => *pos = Some((line, column)),
-            Error::MalformedBlobLiteral(ref mut pos) => *pos = Some((line, column)),
-            Error::MalformedHexInteger(ref mut pos) => *pos = Some((line, column)),
-            Error::ParserError(_, ref mut pos) => *pos = Some((line, column)),
+            Self::Io(_) => {}
+            Self::UnrecognizedToken(ref mut pos) => *pos = Some((line, column)),
+            Self::UnterminatedLiteral(ref mut pos) => *pos = Some((line, column)),
+            Self::UnterminatedBracket(ref mut pos) => *pos = Some((line, column)),
+            Self::UnterminatedBlockComment(ref mut pos) => *pos = Some((line, column)),
+            Self::BadVariableName(ref mut pos) => *pos = Some((line, column)),
+            Self::BadNumber(ref mut pos) => *pos = Some((line, column)),
+            Self::ExpectedEqualsSign(ref mut pos) => *pos = Some((line, column)),
+            Self::MalformedBlobLiteral(ref mut pos) => *pos = Some((line, column)),
+            Self::MalformedHexInteger(ref mut pos) => *pos = Some((line, column)),
+            Self::ParserError(_, ref mut pos) => *pos = Some((line, column)),
         }
     }
 }
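
Two mechanical changes run through this file: Error:: becomes Self:: inside the impl blocks, and the ParserError arms switch to inline format arguments ({msg} and {pos:?} instead of positional {} and {:?} with trailing arguments). A compact sketch of both, using a cut-down error enum that is not the crate's real one:

// Hypothetical example, not from the repository.
use std::fmt;

enum Error {
    ParserError(String, Option<(u64, usize)>),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            // `Self::` instead of `Error::`, and inline format args.
            Self::ParserError(ref msg, Some(pos)) => write!(f, "{msg} at {pos:?}"),
            Self::ParserError(ref msg, None) => write!(f, "{msg}"),
        }
    }
}

fn main() {
    let err = Error::ParserError("syntax error".to_owned(), Some((3, 14)));
    assert_eq!(err.to_string(), "syntax error at (3, 14)");
}
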
10 changes: 5 additions & 5 deletions src/lexer/sql/mod.rs
@@ -33,7 +33,7 @@ pub struct Parser<'input> {
 
 impl<'input> Parser<'input> {
     /// Constructor
-    pub fn new(input: &'input [u8]) -> Parser<'input> {
+    pub fn new(input: &'input [u8]) -> Self {
         let lexer = Tokenizer::new();
         let scanner = Scanner::new(lexer);
         let ctx = Context::new(input);
@@ -253,8 +253,8 @@ pub struct Tokenizer {}
 
 impl Tokenizer {
     /// Constructor
-    pub fn new() -> Tokenizer {
-        Tokenizer {}
+    pub fn new() -> Self {
+        Self {}
     }
 }
 
@@ -401,7 +401,7 @@ impl Splitter for Tokenizer {
             b',' => Ok((Some((&data[..1], TK_COMMA)), 1)),
             b'&' => Ok((Some((&data[..1], TK_BITAND)), 1)),
             b'~' => Ok((Some((&data[..1], TK_BITNOT)), 1)),
-            quote @ b'`' | quote @ b'\'' | quote @ b'"' => literal(data, quote),
+            quote @ (b'`' | b'\'' | b'"') => literal(data, quote),
             b'.' => {
                 if let Some(b) = data.get(1) {
                     if b.is_ascii_digit() {
@@ -417,7 +417,7 @@
             b'[' => {
                 if let Some(i) = memchr(b']', data) {
                     // Keep original quotes / '[' ... ’]'
-                    Ok((Some((&data[0..i + 1], TK_ID)), i + 1))
+                    Ok((Some((&data[0..=i], TK_ID)), i + 1))
                 } else {
                     Err(Error::UnterminatedBracket(None))
                 }
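
The remaining two changes in this file are also small rewrites: the three single-character quote arms collapse into one binding over a nested or-pattern, and the bracket slice uses an inclusive range (&data[0..=i]) instead of &data[0..i + 1]. A standalone sketch of both on invented input, using Iterator::position where the crate uses memchr:

// Hypothetical example, not from the repository.
fn describe(data: &[u8]) -> String {
    match data[0] {
        // One arm with a single binding instead of `quote @ b'`' | quote @ b'\'' | ...`.
        quote @ (b'`' | b'\'' | b'"') => format!("literal quoted with {:?}", quote as char),
        b'[' => {
            // Inclusive range: `0..=i` keeps the closing bracket, like `0..i + 1`.
            let i = data.iter().position(|&b| b == b']').unwrap();
            format!("bracketed id {:?}", String::from_utf8_lossy(&data[0..=i]))
        }
        _ => "other".to_owned(),
    }
}

fn main() {
    assert!(describe(b"'abc'").starts_with("literal quoted"));
    assert_eq!(describe(b"[name] rest"), "bracketed id \"[name]\"");
}
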
6 changes: 3 additions & 3 deletions src/lexer/sql/test.rs
@@ -96,7 +96,7 @@ fn vtab_args() -> Result<(), Error> {
 #[test]
 fn only_semicolons_no_statements() {
     let sqls = ["", ";", ";;;"];
-    for sql in sqls.iter() {
+    for sql in &sqls {
         let r = parse(sql.as_bytes());
         assert_eq!(r.unwrap(), None);
     }
@@ -110,7 +110,7 @@ fn extra_semicolons_between_statements() {
         "; SELECT 1; SELECT 2",
         ";; SELECT 1;; SELECT 2;;",
     ];
-    for sql in sqls.iter() {
+    for sql in &sqls {
         let mut parser = Parser::new(sql.as_bytes());
         assert!(matches!(
             parser.next().unwrap(),
@@ -132,7 +132,7 @@ fn extra_comments_between_statements() {
         "/* abc */; SELECT 1 /* def */; SELECT 2 /* ghj */",
         "/* abc */;; SELECT 1;/* def */; SELECT 2; /* ghj */; /* klm */",
     ];
-    for sql in sqls.iter() {
+    for sql in &sqls {
         let mut parser = Parser::new(sql.as_bytes());
         assert!(matches!(
             parser.next().unwrap(),
(4 of the 11 changed files are not shown above.)
