Commit

sql_quote: Support #
emk committed Oct 21, 2023
1 parent f84ba7e commit fc8a72c
Showing 2 changed files with 138 additions and 60 deletions.
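This commit adds `quote!`-style interpolation to the `sql_quote!` macro: a `#` followed by an expression splices that expression's tokens into the generated SQL token stream, via a new `ToTokens` trait in `src/tokenizer.rs`. Based on the test updated at the bottom of this commit, usage looks roughly like this (a sketch; `optional_distinct` is just a local variable, and interpolating an `Option` that is `None` contributes no tokens):

    let optional_distinct = Some(sql_quote! { DISTINCT });
    let stream = sql_quote! {
        SELECT #optional_distinct
        generate_uuid() AS id
    };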
130 changes: 71 additions & 59 deletions joinery_macros/src/lib.rs
@@ -227,75 +227,87 @@ pub fn sql_quote(input: TokenStream) -> TokenStream {
     let input = TokenStream2::from(input);
 
     let mut sql_token_exprs = vec![];
-    for token in input {
-        emit_sql_token_exprs(&mut sql_token_exprs, token);
-    }
+    emit_sql_token_exprs(&mut sql_token_exprs, input.into_iter());
+    let capacity = sql_token_exprs.len();
     let output = quote! {
-        crate::tokenizer::TokenStream::from_tokens(&[#(#sql_token_exprs),*][..])
+        {
+            use crate::tokenizer::{Literal, Token, TokenStream};
+            let mut __tokens = Vec::with_capacity(#capacity);
+            #( #sql_token_exprs; )*
+            TokenStream { tokens: __tokens }
+        }
     };
     output.into()
 }
 
-fn emit_sql_token_exprs(sql_token_exprs: &mut Vec<TokenStream2>, token: TokenTree) {
-    match token {
-        TokenTree::Group(group) => {
-            // We flatten this and use `Punct::new`.
-            let (open, close) = delimiter_pair(group.delimiter());
-            if let Some(open) = open {
-                sql_token_exprs.push(quote! {
-                    crate::tokenizer::Token::Punct(crate::tokenizer::Punct::new(#open))
-                });
-            }
-            for token in group.stream() {
-                emit_sql_token_exprs(sql_token_exprs, token);
-            }
-            if let Some(close) = close {
-                sql_token_exprs.push(quote! {
-                    crate::tokenizer::Token::Punct(crate::tokenizer::Punct::new(#close))
-                });
-            }
-        }
-        TokenTree::Ident(ident) => {
-            let ident_str = ident.to_string();
-            sql_token_exprs.push(quote! {
-                crate::tokenizer::Token::Ident(crate::tokenizer::Ident::new(#ident_str))
-            });
-        }
-        TokenTree::Punct(punct) => {
-            let punct_str = punct.to_string();
-            sql_token_exprs.push(quote! {
-                crate::tokenizer::Token::Punct(crate::tokenizer::Punct::new(#punct_str))
-            });
-        }
-        TokenTree::Literal(lit) => {
-            // There's probably a better way to do this.
-            let lit: syn::Lit = syn::parse_quote!(#lit);
-            match lit {
-                syn::Lit::Int(i) => {
-                    sql_token_exprs.push(quote! {
-                        crate::tokenizer::Token::Literal(crate::tokenizer::Literal::int(#i))
-                    });
-                }
-                syn::Lit::Str(s) => {
-                    sql_token_exprs.push(quote! {
-                        crate::tokenizer::Token::Literal(crate::tokenizer::Literal::string(#s))
-                    });
-                }
-                syn::Lit::Float(f) => {
-                    sql_token_exprs.push(quote! {
-                        crate::tokenizer::Token::Literal(crate::tokenizer::Literal::float(#f))
-                    });
-                }
-                // syn::Lit::ByteStr(_) => todo!(),
-                // syn::Lit::Byte(_) => todo!(),
-                // syn::Lit::Char(_) => todo!(),
-                // syn::Lit::Bool(_) => todo!(),
-                // syn::Lit::Verbatim(_) => todo!(),
-                _ => {
-                    sql_token_exprs.push(quote_spanned! {
-                        lit.span() =>
-                        compile_error!("unsupported literal type")
-                    });
-                }
-            }
-        }
-    }
-}
+fn emit_sql_token_exprs(
+    sql_token_exprs: &mut Vec<TokenStream2>,
+    mut tokens: impl Iterator<Item = TokenTree>,
+) {
+    while let Some(token) = tokens.next() {
+        match token {
+            // Treat `#` as interpolation.
+            TokenTree::Punct(p) if p.to_string() == "#" => {
+                if let Some(expr) = tokens.next() {
+                    sql_token_exprs.push(quote! {
+                        (#expr).to_tokens(&mut __tokens)
+                    });
+                } else {
+                    sql_token_exprs.push(quote_spanned! {
+                        p.span() =>
+                        compile_error!("expected expression after `#`")
+                    });
+                }
+            }
+            TokenTree::Group(group) => {
+                // We flatten this and use `Punct::new`.
+                let (open, close) = delimiter_pair(group.delimiter());
+                if let Some(open) = open {
+                    sql_token_exprs.push(quote! { __tokens.push(Token::punct(#open)) });
+                }
+                emit_sql_token_exprs(sql_token_exprs, group.stream().into_iter());
+                if let Some(close) = close {
+                    sql_token_exprs.push(quote! { __tokens.push(Token::punct(#close)) });
+                }
+            }
+            TokenTree::Ident(ident) => {
+                let ident_str = ident.to_string();
+                sql_token_exprs.push(quote! { __tokens.push(Token::ident(#ident_str)) });
+            }
+            TokenTree::Punct(punct) => {
+                let punct_str = punct.to_string();
+                sql_token_exprs.push(quote! { __tokens.push(Token::punct(#punct_str)) });
+            }
+            TokenTree::Literal(lit) => {
+                // There's probably a better way to do this.
+                let lit: syn::Lit = syn::parse_quote!(#lit);
+                match lit {
+                    syn::Lit::Int(i) => {
+                        sql_token_exprs.push(quote! {
+                            __tokens.push(Token::Literal(Literal::int(#i)))
+                        });
+                    }
+                    syn::Lit::Str(s) => {
+                        sql_token_exprs.push(quote! {
+                            __tokens.push(Token::Literal(Literal::string(#s)))
+                        });
+                    }
+                    syn::Lit::Float(f) => {
+                        sql_token_exprs.push(quote! {
+                            __tokens.push(Token::Literal(Literal::float(#f)))
+                        });
+                    }
+                    // syn::Lit::ByteStr(_) => todo!(),
+                    // syn::Lit::Byte(_) => todo!(),
+                    // syn::Lit::Char(_) => todo!(),
+                    // syn::Lit::Bool(_) => todo!(),
+                    // syn::Lit::Verbatim(_) => todo!(),
+                    _ => {
+                        sql_token_exprs.push(quote_spanned! {
+                            lit.span() =>
+                            compile_error!("unsupported literal type")
+                        });
+                    }
+                }
+            }
+        }
+    }
+}
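Given the new `quote!` body above, a call like `sql_quote! { SELECT #expr }` should now expand to a block expression along these lines (a hand-written approximation, not literal macro output; `__tokens` and the capacity are generated by the macro):

    {
        use crate::tokenizer::{Literal, Token, TokenStream};
        let mut __tokens = Vec::with_capacity(2);
        __tokens.push(Token::ident("SELECT"));
        (expr).to_tokens(&mut __tokens);
        TokenStream { tokens: __tokens }
    }

Note that the pre-computed capacity counts token expressions, not output tokens, so it is only a lower bound whenever an interpolated `#expr` appends more than one token; the `Vec` simply grows as needed in that case.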
68 changes: 67 additions & 1 deletion src/tokenizer.rs
@@ -95,6 +95,16 @@ pub enum Token {
 }
 
 impl Token {
+    /// Construct a new identifier token.
+    pub fn ident(ident: &str) -> Self {
+        Self::Ident(Ident::new(ident))
+    }
+
+    /// Construct a new punctuation token.
+    pub fn punct(punct: &str) -> Self {
+        Self::Punct(Punct::new(punct))
+    }
+
     /// Get the raw token for this token.
     pub fn raw(&self) -> &RawToken {
         match self {
@@ -541,6 +551,61 @@ impl<'input> ParseElem<'input> for TokenStream {
 // literal-matching rules need to return a token, and `ParseLiteral` returns
 // `()`.
 
+/// Convert a value into tokens, and append them to a [`Vec`].
+///
+/// This is used as part of the implementation of [`sql_quote!`].
+pub trait ToTokens {
+    /// Convert `self` into tokens, and append them to `tokens`.
+    fn to_tokens(&self, tokens: &mut Vec<Token>);
+}
+
+impl ToTokens for Token {
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        tokens.push(self.clone());
+    }
+}
+
+impl ToTokens for Ident {
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        tokens.push(Token::Ident(self.clone()));
+    }
+}
+
+impl ToTokens for Literal {
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        tokens.push(Token::Literal(self.clone()));
+    }
+}
+
+impl ToTokens for Punct {
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        tokens.push(Token::Punct(self.clone()));
+    }
+}
+
+impl<T: ToTokens> ToTokens for Option<T> {
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        if let Some(t) = self {
+            t.to_tokens(tokens);
+        }
+    }
+}
+
+impl<T: ToTokens> ToTokens for Vec<T> {
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        for t in self {
+            t.to_tokens(tokens);
+        }
+    }
+}
+
+impl ToTokens for TokenStream {
+    /// This allows composing `TokenStream`s before re-parsing them.
+    fn to_tokens(&self, tokens: &mut Vec<Token>) {
+        tokens.extend(self.tokens.clone());
+    }
+}
+
 /// A smart `Token` writer that knows when to insert whitespace to prevent
 /// two adjacent tokens from being combined into a single token. This can also
 /// be used to write raw strings, which we do when emitting non-BigQuery SQL.
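Because `TokenStream` itself implements `ToTokens`, interpolation can splice one quoted fragment into another before the combined stream is re-parsed. A hypothetical sketch using the new syntax:

    // Hypothetical: build a fragment, then splice it into an outer query.
    let subquery = sql_quote! { SELECT 1 AS x };
    let outer = sql_quote! { SELECT * FROM (#subquery) };

The `Option<T>` and `Vec<T>` impls follow the same pattern: a `Vec` splices each element in order, and a `None` appends nothing.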
@@ -1015,8 +1080,9 @@ mod test {
 
     #[test]
     fn sql_quote_builds_a_token_stream() {
+        let optional_distinct = Some(sql_quote! { DISTINCT });
         sql_quote! {
-            SELECT
+            SELECT #optional_distinct
             generate_uuid() AS id,
             "hello" AS message,
             1 AS n,
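For contrast with the test above, a hypothetical `None` makes the interpolation vanish entirely, since the `Option<T>` impl of `ToTokens` appends nothing:

    let optional_distinct: Option<TokenStream> = None;
    sql_quote! { SELECT #optional_distinct generate_uuid() AS id };

which should produce the same tokens as plain `SELECT generate_uuid() AS id`.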
