Rename TokenWithLocation to TokenWithSpan, in backwards compatible way (#1562)
alamb authored Nov 30, 2024
1 parent 92c6e7f commit a134910
Showing 4 changed files with 67 additions and 63 deletions.
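
The three diffs expanded below are the call-site changes; the rename itself lands in src/tokenizer.rs, the fourth changed file, which is not expanded on this page. The "backwards compatible" part of the title is conventionally done by keeping the old name as a type alias for the new one. A minimal sketch of that pattern, assuming the field layout visible in the hunks below (this is not the verbatim src/tokenizer.rs diff):

// Sketch of a backwards-compatible rename: the struct takes the new
// name, and the old name survives as an alias so downstream code that
// spells TokenWithLocation keeps compiling. Whether the alias carries
// a #[deprecated] attribute is an assumption here.
pub struct TokenWithSpan {
    pub token: Token,
    pub span: Span,
}

#[deprecated(note = "use TokenWithSpan instead")]
pub type TokenWithLocation = TokenWithSpan;
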
10 changes: 5 additions & 5 deletions src/ast/helpers/attached_token.rs
@@ -19,7 +19,7 @@ use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use core::fmt::{self, Debug, Formatter};
use core::hash::{Hash, Hasher};

-use crate::tokenizer::{Token, TokenWithLocation};
+use crate::tokenizer::{Token, TokenWithSpan};

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
@@ -33,11 +33,11 @@ use sqlparser_derive::{Visit, VisitMut};
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
-pub struct AttachedToken(pub TokenWithLocation);
+pub struct AttachedToken(pub TokenWithSpan);

impl AttachedToken {
pub fn empty() -> Self {
-AttachedToken(TokenWithLocation::wrap(Token::EOF))
+AttachedToken(TokenWithSpan::wrap(Token::EOF))
}
}

@@ -75,8 +75,8 @@ impl Hash for AttachedToken {
}
}

-impl From<TokenWithLocation> for AttachedToken {
-fn from(value: TokenWithLocation) -> Self {
+impl From<TokenWithSpan> for AttachedToken {
+fn from(value: TokenWithSpan) -> Self {
AttachedToken(value)
}
}
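
The From impl above keeps call sites ergonomic: anything holding a TokenWithSpan can become an AttachedToken with .into(). A small usage sketch (module paths are assumed from the file layout, not stated in this diff):

use sqlparser::ast::helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::{Token, TokenWithSpan};

fn main() {
    // wrap() pairs a token with an empty span, as in empty() above.
    let token = TokenWithSpan::wrap(Token::EOF);
    // The From<TokenWithSpan> impl makes .into() available.
    let attached: AttachedToken = token.into();
    let _ = attached;
}
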
4 changes: 2 additions & 2 deletions src/ast/query.rs
@@ -27,7 +27,7 @@ use sqlparser_derive::{Visit, VisitMut};

use crate::{
ast::*,
-tokenizer::{Token, TokenWithLocation},
+tokenizer::{Token, TokenWithSpan},
};

/// The most complete variant of a `SELECT` query expression, optionally
@@ -643,7 +643,7 @@ pub struct WildcardAdditionalOptions {
impl Default for WildcardAdditionalOptions {
fn default() -> Self {
Self {
-wildcard_token: TokenWithLocation::wrap(Token::Mul).into(),
+wildcard_token: TokenWithSpan::wrap(Token::Mul).into(),
opt_ilike: None,
opt_exclude: None,
opt_except: None,
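
With this hunk, the default wildcard token is built by wrapping Token::Mul in an empty span and converting it into an AttachedToken via .into(). A hedged sketch of what the default now carries (crate paths assumed; AttachedToken is the tuple struct from the first file in this commit):

use sqlparser::ast::WildcardAdditionalOptions;
use sqlparser::tokenizer::Token;

fn main() {
    let opts = WildcardAdditionalOptions::default();
    // The wrapped `*` token sits in the AttachedToken's single field;
    // its span is empty because it was built programmatically.
    assert_eq!(opts.wildcard_token.0.token, Token::Mul);
}
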
66 changes: 33 additions & 33 deletions src/parser/mod.rs
@@ -265,7 +265,7 @@ enum ParserState {
}

pub struct Parser<'a> {
-tokens: Vec<TokenWithLocation>,
+tokens: Vec<TokenWithSpan>,
/// The index of the first unprocessed token in [`Parser::tokens`].
index: usize,
/// The current state of the parser.
@@ -359,7 +359,7 @@ impl<'a> Parser<'a> {
}

/// Reset this parser to parse the specified token stream
-pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithLocation>) -> Self {
+pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
self.tokens = tokens;
self.index = 0;
self
@@ -368,9 +368,9 @@ impl<'a> Parser<'a> {
/// Reset this parser state to parse the specified tokens
pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
// Put in dummy locations
-let tokens_with_locations: Vec<TokenWithLocation> = tokens
+let tokens_with_locations: Vec<TokenWithSpan> = tokens
.into_iter()
-.map(|token| TokenWithLocation {
+.map(|token| TokenWithSpan {
token,
span: Span::empty(),
})
@@ -1147,7 +1147,7 @@ impl<'a> Parser<'a> {
match self.peek_token().token {
Token::LParen | Token::Period => {
let mut id_parts: Vec<Ident> = vec![w.to_ident(w_span)];
-let mut ending_wildcard: Option<TokenWithLocation> = None;
+let mut ending_wildcard: Option<TokenWithSpan> = None;
while self.consume_token(&Token::Period) {
let next_token = self.next_token();
match next_token.token {
@@ -3273,7 +3273,7 @@ impl<'a> Parser<'a> {

/// Return the first non-whitespace token that has not yet been processed
/// (or None if reached end-of-file)
-pub fn peek_token(&self) -> TokenWithLocation {
+pub fn peek_token(&self) -> TokenWithSpan {
self.peek_nth_token(0)
}

@@ -3308,38 +3308,38 @@ impl<'a> Parser<'a> {
/// yet been processed.
///
/// See [`Self::peek_token`] for an example.
-pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithLocation; N] {
+pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
let mut index = self.index;
core::array::from_fn(|_| loop {
let token = self.tokens.get(index);
index += 1;
-if let Some(TokenWithLocation {
+if let Some(TokenWithSpan {
token: Token::Whitespace(_),
span: _,
}) = token
{
continue;
}
-break token.cloned().unwrap_or(TokenWithLocation {
+break token.cloned().unwrap_or(TokenWithSpan {
token: Token::EOF,
span: Span::empty(),
});
})
}

/// Return nth non-whitespace token that has not yet been processed
-pub fn peek_nth_token(&self, mut n: usize) -> TokenWithLocation {
+pub fn peek_nth_token(&self, mut n: usize) -> TokenWithSpan {
let mut index = self.index;
loop {
index += 1;
match self.tokens.get(index - 1) {
-Some(TokenWithLocation {
+Some(TokenWithSpan {
token: Token::Whitespace(_),
span: _,
}) => continue,
non_whitespace => {
if n == 0 {
-return non_whitespace.cloned().unwrap_or(TokenWithLocation {
+return non_whitespace.cloned().unwrap_or(TokenWithSpan {
token: Token::EOF,
span: Span::empty(),
});
@@ -3352,16 +3352,16 @@ impl<'a> Parser<'a> {

/// Return the first token, possibly whitespace, that has not yet been processed
/// (or None if reached end-of-file).
-pub fn peek_token_no_skip(&self) -> TokenWithLocation {
+pub fn peek_token_no_skip(&self) -> TokenWithSpan {
self.peek_nth_token_no_skip(0)
}

/// Return nth token, possibly whitespace, that has not yet been processed.
-pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithLocation {
+pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
self.tokens
.get(self.index + n)
.cloned()
-.unwrap_or(TokenWithLocation {
+.unwrap_or(TokenWithSpan {
token: Token::EOF,
span: Span::empty(),
})
@@ -3378,25 +3378,25 @@ impl<'a> Parser<'a> {
/// Return the first non-whitespace token that has not yet been processed
/// (or None if reached end-of-file) and mark it as processed. OK to call
/// repeatedly after reaching EOF.
-pub fn next_token(&mut self) -> TokenWithLocation {
+pub fn next_token(&mut self) -> TokenWithSpan {
loop {
self.index += 1;
match self.tokens.get(self.index - 1) {
-Some(TokenWithLocation {
+Some(TokenWithSpan {
token: Token::Whitespace(_),
span: _,
}) => continue,
token => {
return token
.cloned()
-.unwrap_or_else(|| TokenWithLocation::wrap(Token::EOF))
+.unwrap_or_else(|| TokenWithSpan::wrap(Token::EOF))
}
}
}
}

/// Return the first unprocessed token, possibly whitespace.
-pub fn next_token_no_skip(&mut self) -> Option<&TokenWithLocation> {
+pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
self.index += 1;
self.tokens.get(self.index - 1)
}
@@ -3408,7 +3408,7 @@ impl<'a> Parser<'a> {
loop {
assert!(self.index > 0);
self.index -= 1;
-if let Some(TokenWithLocation {
+if let Some(TokenWithSpan {
token: Token::Whitespace(_),
span: _,
}) = self.tokens.get(self.index)
@@ -3420,7 +3420,7 @@ impl<'a> Parser<'a> {
}

/// Report `found` was encountered instead of `expected`
-pub fn expected<T>(&self, expected: &str, found: TokenWithLocation) -> Result<T, ParserError> {
+pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
parser_err!(
format!("Expected: {expected}, found: {found}"),
found.span.start
@@ -3435,7 +3435,7 @@ impl<'a> Parser<'a> {
}

#[must_use]
-pub fn parse_keyword_token(&mut self, expected: Keyword) -> Option<TokenWithLocation> {
+pub fn parse_keyword_token(&mut self, expected: Keyword) -> Option<TokenWithSpan> {
match self.peek_token().token {
Token::Word(w) if expected == w.keyword => Some(self.next_token()),
_ => None,
@@ -3524,7 +3524,7 @@ impl<'a> Parser<'a> {

/// If the current token is the `expected` keyword, consume the token.
/// Otherwise, return an error.
-pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithLocation, ParserError> {
+pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
if let Some(token) = self.parse_keyword_token(expected) {
Ok(token)
} else {
@@ -3568,7 +3568,7 @@ impl<'a> Parser<'a> {
}

/// Bail out if the current token is not an expected keyword, or consume it if it is
-pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithLocation, ParserError> {
+pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
if self.peek_token() == *expected {
Ok(self.next_token())
} else {
@@ -4107,7 +4107,7 @@ impl<'a> Parser<'a> {
Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
_ => self.expected(
"JAR, FILE or ARCHIVE, got {:?}",
-TokenWithLocation::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
+TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
),
}
}
@@ -6832,7 +6832,7 @@ impl<'a> Parser<'a> {
if let Some(name) = name {
return self.expected(
"FULLTEXT or SPATIAL option without constraint name",
-TokenWithLocation {
+TokenWithSpan {
token: Token::make_keyword(&name.to_string()),
span: next_token.span,
},
@@ -7808,15 +7808,15 @@ impl<'a> Parser<'a> {
Some('\'') => Ok(Value::SingleQuotedString(w.value)),
_ => self.expected(
"A value?",
-TokenWithLocation {
+TokenWithSpan {
token: Token::Word(w),
span,
},
)?,
},
_ => self.expected(
"a concrete value",
-TokenWithLocation {
+TokenWithSpan {
token: Token::Word(w),
span,
},
@@ -7878,7 +7878,7 @@ impl<'a> Parser<'a> {
}
unexpected => self.expected(
"a value",
-TokenWithLocation {
+TokenWithSpan {
token: unexpected,
span,
},
@@ -7927,7 +7927,7 @@ impl<'a> Parser<'a> {
Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
unexpected => self.expected(
"a string value",
-TokenWithLocation {
+TokenWithSpan {
token: unexpected,
span,
},
@@ -8618,7 +8618,7 @@ impl<'a> Parser<'a> {
let token = self
.next_token_no_skip()
.cloned()
-.unwrap_or(TokenWithLocation::wrap(Token::EOF));
+.unwrap_or(TokenWithSpan::wrap(Token::EOF));
requires_whitespace = match token.token {
Token::Word(next_word) if next_word.quote_style.is_none() => {
ident.value.push_str(&next_word.value);
@@ -11683,7 +11683,7 @@ impl<'a> Parser<'a> {
/// If it is not possible to parse it, will return an option.
pub fn parse_wildcard_additional_options(
&mut self,
-wildcard_token: TokenWithLocation,
+wildcard_token: TokenWithSpan,
) -> Result<WildcardAdditionalOptions, ParserError> {
let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
self.parse_optional_select_item_ilike()?
@@ -12708,7 +12708,7 @@ impl<'a> Parser<'a> {
}

/// Consume the parser and return its underlying token buffer
-pub fn into_tokens(self) -> Vec<TokenWithLocation> {
+pub fn into_tokens(self) -> Vec<TokenWithSpan> {
self.tokens
}

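
Taken together, the peek/next/expect family now returns TokenWithSpan, so callers can surface source positions in errors and tooling. An end-to-end sketch against the signatures above (Parser::new and try_with_sql are the crate's usual entry points; treat exact paths and the Debug formatting as assumptions):

use sqlparser::dialect::GenericDialect;
use sqlparser::keywords::Keyword;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};
    let mut parser = Parser::new(&dialect).try_with_sql("SELECT a FROM t")?;
    // expect_keyword now hands back the consumed TokenWithSpan, so the
    // keyword's position in the original SQL is available to callers.
    let select = parser.expect_keyword(Keyword::SELECT)?;
    println!("SELECT starts at {:?}", select.span.start);
    Ok(())
}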