Skip to content

Commit

Permalink
remove Snowflake at @emk's suggestion
Browse files Browse the repository at this point in the history
Co-Authored-By: Eric Kidd <[email protected]>
  • Loading branch information
hanakslr and emk committed Dec 3, 2024
1 parent 31bc873 commit ac8831a
Show file tree
Hide file tree
Showing 23 changed files with 30 additions and 1,675 deletions.
1,078 changes: 15 additions & 1,063 deletions Cargo.lock

Large diffs are not rendered by default.

7 changes: 1 addition & 6 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,11 @@ license = "Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = ["snowflake"]
snowflake = []
default = []
trace = ["peg/trace"]

[dependencies]
anstream = "0.6.4"
# `arrow` must match snowflake-api's arrow version.
arrow = "48.0.1"
arrow-json = "48.0.1"
async-rusqlite = "0.4.0"
async-trait = "0.1.73"
clap = { version = "4.4.6", features = ["derive", "wrap_help"] }
Expand All @@ -35,7 +31,6 @@ regex = "1.10.0"
rusqlite = { version = "0.29.0", features = ["bundled", "functions", "vtab"] }
serde = { version = "1.0.188", features = ["derive"] }
serde_json = "1.0.107"
snowflake-api = "0.3.0"
thiserror = "1.0.49"
tokio = { version = "1.33.0", features = ["macros", "rt-multi-thread"] }
tracing = "0.1.37"
Expand Down
13 changes: 0 additions & 13 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@ Options:

- Trino has passing unit tests for all our use cases, but probably not yours. Also, there's a difference between "works with the SQL test suite that _theoretically_ covers the features we support" and "works with gnarly production queries that do tricky things with correlated subqueries."
- AWS Athena 3 is basically Trino, except UDFs are different and we don't support them yet. There may also be dialect differences. Not currently tested.
- Snowflake has partial support.

## Design philosophy

Expand Down Expand Up @@ -114,18 +113,6 @@ just trino-shell

For more details on all these commands, see the [`Justfile`](./Justfile).

### Snowflake

This hasn't been updated recently, so some tests for newer features may fail.

You can specify Snowflake using

```txt
--database snowflake://<user>@<organization>-<account>[.privatelink]/<warehouse>/<database>
```

You'll also need to set the `SNOWFLAKE_PASSWORD` environment variable.

## Developing

See [ARCHITECTURE.md](./ARCHITECTURE.md) for an overview of the codebase.
Expand Down
88 changes: 5 additions & 83 deletions src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@ use joinery_macros::{Emit, EmitDefault, Spanned, ToTokens};
use crate::{
drivers::{
bigquery::{BigQueryName, BigQueryString},
snowflake::{SnowflakeString, KEYWORDS as SNOWFLAKE_KEYWORDS},
sqlite3::KEYWORDS as SQLITE3_KEYWORDS,
trino::{TrinoString, KEYWORDS as TRINO_KEYWORDS},
},
Expand Down Expand Up @@ -68,7 +67,6 @@ static KEYWORDS: phf::Set<&'static str> = phf::phf_set! {
#[allow(dead_code)]
pub enum Target {
BigQuery,
Snowflake,
SQLite3,
Trino,
}
Expand All @@ -78,7 +76,6 @@ impl Target {
pub fn is_keyword(self, s: &str) -> bool {
let keywords = match self {
Target::BigQuery => &KEYWORDS,
Target::Snowflake => &SNOWFLAKE_KEYWORDS,
Target::SQLite3 => &SQLITE3_KEYWORDS,
Target::Trino => &TRINO_KEYWORDS,
};
Expand All @@ -90,7 +87,6 @@ impl fmt::Display for Target {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Target::BigQuery => write!(f, "bigquery"),
Target::Snowflake => write!(f, "snowflake"),
Target::SQLite3 => write!(f, "sqlite3"),
Target::Trino => write!(f, "trino"),
}
Expand Down Expand Up @@ -195,7 +191,7 @@ impl Emit for Ident {
if t.is_keyword(&self.name) || !is_c_ident(&self.name) {
match t {
Target::BigQuery => write!(f, "{}", BigQueryName(&self.name))?,
Target::Snowflake | Target::SQLite3 | Target::Trino => {
Target::SQLite3 | Target::Trino => {
write!(f, "{}", AnsiIdent(&self.name))?;
}
}
Expand Down Expand Up @@ -240,7 +236,6 @@ impl Emit for LiteralValue {
LiteralValue::Float64(fl) => write!(f, "{}", fl),
LiteralValue::String(s) => match t {
Target::BigQuery => write!(f, "{}", BigQueryString(s)),
Target::Snowflake => write!(f, "{}", SnowflakeString(s)),
Target::SQLite3 => write!(f, "{}", AnsiString(s)),
Target::Trino => write!(f, "{}", TrinoString(s)),
},
Expand Down Expand Up @@ -713,7 +708,7 @@ impl Emit for SetOperator {
// whitespace from the first token in those cases. In other cases,
// we'll substitute `UNION` with a comment saying what it really
// should be.
Target::Snowflake | Target::SQLite3 => match self {
Target::SQLite3 => match self {
SetOperator::UnionAll {
union_token,
all_token,
Expand Down Expand Up @@ -836,32 +831,14 @@ pub enum SelectListItem {
}

/// An `EXCEPT` clause.
#[derive(Clone, Debug, Drive, DriveMut, EmitDefault, Spanned, ToTokens)]
#[derive(Clone, Debug, Drive, DriveMut, Emit, EmitDefault, Spanned, ToTokens)]
pub struct Except {
pub except_token: Keyword,
pub paren1: Punct,
pub columns: NodeVec<Ident>,
pub paren2: Punct,
}

impl Emit for Except {
fn emit(&self, t: Target, f: &mut TokenWriter<'_>) -> io::Result<()> {
match t {
Target::Snowflake => {
self.except_token
.ident
.token
.with_str("EXCLUDE")
.emit(t, f)?;
self.paren1.token.with_ws_only().emit(t, f)?;
self.columns.emit(t, f)?;
self.paren2.token.with_ws_only().emit(t, f)
}
_ => self.emit_default(t, f),
}
}
}

/// An SQL expression.
#[derive(Clone, Debug, Drive, DriveMut, Emit, EmitDefault, Spanned, ToTokens)]
pub enum Expression {
Expand Down Expand Up @@ -963,9 +940,7 @@ pub enum CastType {
impl Emit for CastType {
fn emit(&self, t: Target, f: &mut TokenWriter<'_>) -> io::Result<()> {
match self {
CastType::SafeCast { safe_cast_token }
if t == Target::Snowflake || t == Target::Trino =>
{
CastType::SafeCast { safe_cast_token } if t == Target::Trino => {
safe_cast_token.ident.token.with_str("TRY_CAST").emit(t, f)
}
// TODO: This isn't strictly right, but it's as close as I know how to
Expand Down Expand Up @@ -1180,11 +1155,6 @@ impl Emit for ArrayExpression {
last_token.with_ws_only().emit(t, f)?;
}
_ => match t {
Target::Snowflake => {
self.delim1.token.with_str("[").emit(t, f)?;
self.definition.emit(t, f)?;
self.delim2.token.with_str("]").emit(t, f)?;
}
Target::SQLite3 => {
if let Some(array_token) = &self.array_token {
array_token.emit(t, f)?;
Expand Down Expand Up @@ -1407,7 +1377,7 @@ impl SpecialDateExpression {
}

/// An `ARRAY_AGG` expression.
#[derive(Clone, Debug, Drive, DriveMut, EmitDefault, Spanned, ToTokens)]
#[derive(Clone, Debug, Drive, DriveMut, Emit, EmitDefault, Spanned, ToTokens)]
pub struct ArrayAggExpression {
pub array_agg_token: PseudoKeyword,
pub paren1: Punct,
Expand All @@ -1417,33 +1387,6 @@ pub struct ArrayAggExpression {
pub paren2: Punct,
}

impl Emit for ArrayAggExpression {
fn emit(&self, t: Target, f: &mut TokenWriter<'_>) -> io::Result<()> {
match self {
// Snowflake formats ORDER BY as `ARRAY_AGG(expression) WITHIN GROUP
// (ORDER BY ...)`.
ArrayAggExpression {
array_agg_token,
paren1,
distinct,
expression,
order_by: Some(order_by),
paren2,
} if t == Target::Snowflake => {
array_agg_token.emit(t, f)?;
paren1.emit(t, f)?;
distinct.emit(t, f)?;
expression.emit(t, f)?;
paren2.emit(t, f)?;
f.write_token_start("WITHIN GROUP(")?;
order_by.emit(t, f)?;
f.write_token_start(")")
}
_ => self.emit_default(t, f),
}
}
}

/// A function call.
#[derive(Clone, Debug, Drive, DriveMut, Emit, EmitDefault, Spanned, ToTokens)]
pub struct FunctionCall {
Expand Down Expand Up @@ -1603,27 +1546,6 @@ pub enum DataType {
impl Emit for DataType {
fn emit(&self, t: Target, f: &mut TokenWriter<'_>) -> io::Result<()> {
match t {
Target::Snowflake => match self {
DataType::Bool(token) => token.ident.token.with_str("BOOLEAN").emit(t, f),
DataType::Bytes(token) => token.ident.token.with_str("BINARY").emit(t, f),
DataType::Int64(token) => token.ident.token.with_str("INTEGER").emit(t, f),
DataType::Date(token) => token.emit(t, f),
// "Wall clock" time with no timezone.
DataType::Datetime(token) => token.ident.token.with_str("TIMESTAMP_NTZ").emit(t, f),
DataType::Float64(token) => token.ident.token.with_str("FLOAT8").emit(t, f),
DataType::Geography(token) => token.emit(t, f),
DataType::Numeric(token) => token.emit(t, f),
DataType::String(token) => token.ident.token.with_str("TEXT").emit(t, f),
DataType::Time(token) => token.emit(t, f),
// `TIMESTAMP_TZ` will need very careful timezone handling.
DataType::Timestamp(token) => token.ident.token.with_str("TIMESTAMP_TZ").emit(t, f),
DataType::Array { array_token, .. } => {
array_token.ident.token.with_str("ARRAY").emit(t, f)
}
DataType::Struct { struct_token, .. } => {
struct_token.ident.token.with_str("OBJECT").emit(t, f)
}
},
Target::SQLite3 => match self {
DataType::Bool(token) | DataType::Int64(token) => {
token.ident.token.with_str("INTEGER").emit(t, f)
Expand Down
3 changes: 0 additions & 3 deletions src/drivers/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,11 @@ use crate::{
};

use self::{
snowflake::{SnowflakeLocator, SNOWFLAKE_LOCATOR_PREFIX},
sqlite3::{SQLite3Locator, SQLITE3_LOCATOR_PREFIX},
trino::{TrinoLocator, TRINO_LOCATOR_PREFIX},
};

pub mod bigquery;
pub mod snowflake;
pub mod sqlite3;
pub mod trino;

Expand All @@ -44,7 +42,6 @@ impl FromStr for Box<dyn Locator> {
let prefix = &s[..colon_pos + 1];
match prefix {
SQLITE3_LOCATOR_PREFIX => Ok(Box::new(s.parse::<SQLite3Locator>()?)),
SNOWFLAKE_LOCATOR_PREFIX => Ok(Box::new(s.parse::<SnowflakeLocator>()?)),
TRINO_LOCATOR_PREFIX => Ok(Box::new(s.parse::<TrinoLocator>()?)),
_ => Err(format_err!("unsupported database type: {}", s)),
}
Expand Down
Loading

0 comments on commit ac8831a

Please sign in to comment.