diff --git a/Cargo.lock b/Cargo.lock index cfd52bd42c57..ec9b4ae466b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3742,6 +3742,14 @@ dependencies = [ "winapi", ] +[[package]] +name = "query-builder" +version = "0.1.0" +dependencies = [ + "query-structure", + "serde", +] + [[package]] name = "query-connector" version = "0.1.0" @@ -3785,11 +3793,13 @@ dependencies = [ "prisma-metrics", "psl", "quaint", + "query-builder", "query-connector", "query-structure", "schema", "serde", "serde_json", + "sql-query-builder", "sql-query-connector", "telemetry", "thiserror", @@ -4004,6 +4014,7 @@ dependencies = [ "chrono", "cuid", "getrandom 0.2.11", + "indexmap 2.2.2", "itertools 0.12.0", "nanoid", "prisma-value", @@ -5118,6 +5129,21 @@ dependencies = [ "user-facing-errors", ] +[[package]] +name = "sql-query-builder" +version = "0.1.0" +dependencies = [ + "chrono", + "itertools 0.12.0", + "prisma-value", + "psl", + "quaint", + "query-builder", + "query-structure", + "serde_json", + "telemetry", +] + [[package]] name = "sql-query-connector" version = "0.1.0" @@ -5139,6 +5165,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", + "sql-query-builder", "telemetry", "thiserror", "tokio", diff --git a/Cargo.toml b/Cargo.toml index b78f4801fd56..4658ac4617c9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ members = [ "query-engine/query-engine-c-abi", "query-engine/request-handlers", "query-engine/schema", + "query-engine/query-builders/*", "libs/*", "prisma-fmt", "prisma-schema-wasm", diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs index 94f29b21535e..fc73a97a3066 100644 --- a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs @@ -5,9 +5,7 @@ use crate::{ MongoDbTransaction, }; use async_trait::async_trait; -use connector_interface::{ - 
Connection, ConnectionLike, ReadOperations, Transaction, UpdateType, WriteArgs, WriteOperations, -}; +use connector_interface::{Connection, ConnectionLike, ReadOperations, Transaction, UpdateType, WriteOperations}; use mongodb::{ClientSession, Database}; use query_structure::{prelude::*, RelationLoadStrategy, SelectionResult}; use std::collections::HashMap; @@ -55,7 +53,7 @@ impl WriteOperations for MongoDbConnection { async fn create_record( &mut self, model: &Model, - args: WriteArgs, + args: query_structure::WriteArgs, // The field selection on a create is never used on MongoDB as it cannot return more than the ID. _selected_fields: FieldSelection, _traceparent: Option, @@ -66,7 +64,7 @@ impl WriteOperations for MongoDbConnection { async fn create_records( &mut self, model: &Model, - args: Vec, + args: Vec, skip_duplicates: bool, _traceparent: Option, ) -> connector_interface::Result { @@ -83,7 +81,7 @@ impl WriteOperations for MongoDbConnection { async fn create_records_returning( &mut self, _model: &Model, - _args: Vec, + _args: Vec, _skip_duplicates: bool, _selected_fields: FieldSelection, _traceparent: Option, @@ -94,8 +92,8 @@ impl WriteOperations for MongoDbConnection { async fn update_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -118,8 +116,8 @@ impl WriteOperations for MongoDbConnection { async fn update_records_returning( &mut self, _model: &Model, - _record_filter: connector_interface::RecordFilter, - _args: WriteArgs, + _record_filter: query_structure::RecordFilter, + _args: query_structure::WriteArgs, _selected_fields: FieldSelection, _limit: Option, _traceparent: Option, @@ -130,8 +128,8 @@ impl WriteOperations for MongoDbConnection { async fn update_record( &mut self, model: &Model, - record_filter: 
connector_interface::RecordFilter, - args: WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, selected_fields: Option, _traceparent: Option, ) -> connector_interface::Result> { @@ -163,7 +161,7 @@ impl WriteOperations for MongoDbConnection { async fn delete_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -180,7 +178,7 @@ impl WriteOperations for MongoDbConnection { async fn delete_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, selected_fields: FieldSelection, _traceparent: Option, ) -> connector_interface::Result { @@ -314,7 +312,7 @@ impl ReadOperations for MongoDbConnection { &mut self, model: &Model, query_arguments: query_structure::QueryArguments, - selections: Vec, + selections: Vec, group_by: Vec, having: Option, _traceparent: Option, diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs index 31943e0dd6cc..9f7b886d82da 100644 --- a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs @@ -80,7 +80,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn create_record( &mut self, model: &Model, - args: connector_interface::WriteArgs, + args: query_structure::WriteArgs, // The field selection on a create is never used on MongoDB as it cannot return more than the ID. 
_selected_fields: FieldSelection, _traceparent: Option, @@ -97,7 +97,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn create_records( &mut self, model: &Model, - args: Vec, + args: Vec, skip_duplicates: bool, _traceparent: Option, ) -> connector_interface::Result { @@ -114,7 +114,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn create_records_returning( &mut self, _model: &Model, - _args: Vec, + _args: Vec, _skip_duplicates: bool, _selected_fields: FieldSelection, _traceparent: Option, @@ -125,8 +125,8 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn update_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: connector_interface::WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -148,8 +148,8 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn update_records_returning( &mut self, _model: &Model, - _record_filter: connector_interface::RecordFilter, - _args: connector_interface::WriteArgs, + _record_filter: query_structure::RecordFilter, + _args: query_structure::WriteArgs, _selected_fields: FieldSelection, _limit: Option, _traceparent: Option, @@ -160,8 +160,8 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn update_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: connector_interface::WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, selected_fields: Option, _traceparent: Option, ) -> connector_interface::Result> { @@ -192,7 +192,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn delete_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -209,7 +209,7 @@ impl WriteOperations for 
MongoDbTransaction<'_> { async fn delete_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, selected_fields: FieldSelection, _traceparent: Option, ) -> connector_interface::Result { @@ -348,7 +348,7 @@ impl ReadOperations for MongoDbTransaction<'_> { &mut self, model: &Model, query_arguments: query_structure::QueryArguments, - selections: Vec, + selections: Vec, group_by: Vec, having: Option, _traceparent: Option, diff --git a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs index d1937bf7fee7..1a38448ff584 100644 --- a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs +++ b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs @@ -1,7 +1,7 @@ -use connector_interface::AggregationSelection; use indexmap::IndexMap; use query_structure::{ - ast::FieldArity, DefaultKind, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField, TypeIdentifier, + ast::FieldArity, AggregationSelection, DefaultKind, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField, + TypeIdentifier, }; /// Maps field db field names to their meta information. 
diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs index c40c3ee8d0dc..a58a3928ed30 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs @@ -1,8 +1,7 @@ use crate::constants::*; use bson::{doc, Bson, Document}; -use connector_interface::AggregationSelection; -use query_structure::{AggregationFilter, Filter, ScalarFieldRef}; +use query_structure::{AggregationFilter, AggregationSelection, Filter, ScalarFieldRef}; use std::collections::HashSet; /// Represents a `$group` aggregation stage. diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs index bfe48d5f851c..61e84de6fa6b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs @@ -11,10 +11,11 @@ use crate::{ vacuum_cursor, BsonTransform, IntoBson, }; use bson::{doc, Document}; -use connector_interface::AggregationSelection; use itertools::Itertools; use mongodb::{options::AggregateOptions, ClientSession, Collection}; -use query_structure::{FieldSelection, Filter, Model, QueryArguments, ScalarFieldRef, VirtualSelection}; +use query_structure::{ + AggregationSelection, FieldSelection, Filter, Model, QueryArguments, ScalarFieldRef, VirtualSelection, +}; use std::convert::TryFrom; use std::future::IntoFuture; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs index 797e34127f8a..02bc36481d51 100644 --- 
a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs @@ -2,7 +2,7 @@ use crate::{constants::*, output_meta, query_builder::MongoReadQueryBuilder, val use connector_interface::*; use mongodb::{bson::Document, ClientSession, Database}; -use query_structure::{prelude::*, Filter, QueryArguments}; +use query_structure::{prelude::*, AggregationSelection, Filter, QueryArguments}; pub async fn aggregate<'conn>( database: &Database, diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs index f1be40e57416..13cf72beba8f 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs @@ -1,8 +1,8 @@ use super::{into_expression::IntoUpdateExpression, operation}; use bson::{doc, Bson, Document}; -use connector_interface::FieldPath; use indexmap::IndexMap; +use query_structure::FieldPath; /// `UpdateExpression` is an intermediary AST that's used to represent MongoDB expressions. /// It is meant to be transformed into `BSON`. 
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs index 52ffbd70e338..34c843afc06b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs @@ -2,8 +2,7 @@ use super::operation::*; use crate::*; use bson::doc; -use connector_interface::{CompositeWriteOperation, FieldPath, ScalarWriteOperation, WriteOperation}; -use query_structure::{Field, PrismaValue}; +use query_structure::{CompositeWriteOperation, Field, FieldPath, PrismaValue, ScalarWriteOperation, WriteOperation}; pub(crate) trait IntoUpdateOperation { fn into_update_operations(self, field: &Field, path: FieldPath) -> crate::Result>; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs index 481ce7a95d29..287d0c157d86 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs @@ -8,9 +8,9 @@ use super::*; use crate::*; use bson::Document; -use connector_interface::{FieldPath, WriteOperation}; use into_expression::IntoUpdateExpressions; use into_operation::IntoUpdateOperation; +use query_structure::{FieldPath, WriteOperation}; pub(crate) trait IntoUpdateDocumentExtension { fn into_update_docs(self, field: &Field, path: FieldPath) -> crate::Result>; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs index 62502f03a4aa..f32e55a0d4f3 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs +++ 
b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs @@ -1,7 +1,6 @@ use super::{expression, into_expression::IntoUpdateExpression}; use bson::{doc, Document}; -use connector_interface::FieldPath; -use query_structure::Filter; +use query_structure::{FieldPath, Filter}; /// `UpdateOperation` is an intermediary AST used to perform preliminary transformations from a `WriteOperation`. /// It is meant to be transformed into an `UpdateExpression`. diff --git a/query-engine/connectors/query-connector/src/interface.rs b/query-engine/connectors/query-connector/src/interface.rs index 3bf1614e0394..3db5a11a3b5c 100644 --- a/query-engine/connectors/query-connector/src/interface.rs +++ b/query-engine/connectors/query-connector/src/interface.rs @@ -1,7 +1,7 @@ -use crate::{NativeUpsert, WriteArgs}; +use crate::NativeUpsert; use async_trait::async_trait; use prisma_value::PrismaValue; -use query_structure::{ast::FieldArity, *}; +use query_structure::*; use std::collections::HashMap; use telemetry::TraceParent; @@ -50,125 +50,6 @@ pub trait Transaction: ConnectionLike { /// transactions into something that can is capable of writing to or reading from the database. pub trait ConnectionLike: ReadOperations + WriteOperations + Send + Sync {} -/// A wrapper struct allowing to either filter for records or for the core to -/// communicate already known record selectors to connectors. -/// -/// Connector implementations should use known selectors to skip unnecessary fetch operations -/// if the query core already determined the selectors in a previous step. Simply put, -/// `selectors` should always have precendence over `filter`. 
-#[derive(Debug, Clone)] -pub struct RecordFilter { - pub filter: Filter, - pub selectors: Option>, -} - -impl RecordFilter { - pub fn empty() -> Self { - Self { - filter: Filter::empty(), - selectors: None, - } - } - - pub fn has_selectors(&self) -> bool { - self.selectors.is_some() - } -} - -impl From for RecordFilter { - fn from(filter: Filter) -> Self { - Self { - filter, - selectors: None, - } - } -} - -impl From> for RecordFilter { - fn from(selectors: Vec) -> Self { - Self { - filter: Filter::empty(), - selectors: Some(selectors), - } - } -} - -impl From for RecordFilter { - fn from(selector: SelectionResult) -> Self { - Self { - filter: Filter::empty(), - selectors: Some(vec![selector]), - } - } -} - -/// Selections for aggregation queries. -#[derive(Debug, Clone)] -pub enum AggregationSelection { - /// Single field selector. Only valid in the context of group by statements. - Field(ScalarFieldRef), - - /// Counts records of the model that match the query. - /// `all` indicates that an all-records selection has been made (e.g. SQL *). - /// `fields` are specific fields to count on. By convention, if `all` is true, - /// it will always be the last of the count results. - Count { all: bool, fields: Vec }, - - /// Compute average for each field contained. - Average(Vec), - - /// Compute sum for each field contained. - Sum(Vec), - - /// Compute mininum for each field contained. - Min(Vec), - - /// Compute maximum for each field contained. 
- Max(Vec), -} - -impl AggregationSelection { - /// Returns (field_db_name, TypeIdentifier, FieldArity) - pub fn identifiers(&self) -> Vec<(String, TypeIdentifier, FieldArity)> { - match self { - AggregationSelection::Field(field) => { - vec![(field.db_name().to_owned(), field.type_identifier(), field.arity())] - } - - AggregationSelection::Count { all, fields } => { - let mut mapped = Self::map_field_types(fields, Some(TypeIdentifier::Int)); - - if *all { - mapped.push(("all".to_owned(), TypeIdentifier::Int, FieldArity::Required)); - } - - mapped - } - - AggregationSelection::Average(fields) => Self::map_field_types(fields, Some(TypeIdentifier::Float)), - AggregationSelection::Sum(fields) => Self::map_field_types(fields, None), - AggregationSelection::Min(fields) => Self::map_field_types(fields, None), - AggregationSelection::Max(fields) => Self::map_field_types(fields, None), - } - } - - fn map_field_types( - fields: &[ScalarFieldRef], - fixed_type: Option, - ) -> Vec<(String, TypeIdentifier, FieldArity)> { - fields - .iter() - .map(|f| { - ( - f.db_name().to_owned(), - fixed_type.unwrap_or_else(|| f.type_identifier()), - FieldArity::Required, - ) - }) - .collect() - } -} - pub type AggregationRow = Vec; /// Result of an aggregation operation on a model or field. 
diff --git a/query-engine/connectors/query-connector/src/lib.rs b/query-engine/connectors/query-connector/src/lib.rs index c497f121ae9d..b810e70a8b22 100644 --- a/query-engine/connectors/query-connector/src/lib.rs +++ b/query-engine/connectors/query-connector/src/lib.rs @@ -5,12 +5,10 @@ pub mod error; mod coerce; mod interface; mod upsert; -mod write_args; pub use coerce::*; pub use interface::*; pub use upsert::*; -pub use write_args::*; pub type Result = std::result::Result; diff --git a/query-engine/connectors/query-connector/src/upsert.rs b/query-engine/connectors/query-connector/src/upsert.rs index 9455fbc30c49..6b1872db95a9 100644 --- a/query-engine/connectors/query-connector/src/upsert.rs +++ b/query-engine/connectors/query-connector/src/upsert.rs @@ -1,5 +1,4 @@ -use crate::{RecordFilter, WriteArgs}; -use query_structure::{FieldSelection, Filter, Model, ScalarFieldRef}; +use query_structure::{FieldSelection, Filter, Model, RecordFilter, ScalarFieldRef, WriteArgs}; #[derive(Debug, Clone)] pub struct NativeUpsert { diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 779ae6e50193..d53013d15832 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -29,7 +29,7 @@ all-native = [ # TODO: At the moment of writing (rustc 1.77.0), can_have_capability from psl does not eliminate joins # code from bundle for some reason, so we are doing it explicitly. 
Check with a newer version of compiler - if elimination # happens successfully, we don't need this feature anymore -relation_joins = [] +relation_joins = ["sql-query-builder/relation_joins"] # Enable Driver Adapters driver-adapters = [] @@ -62,6 +62,9 @@ path = "../query-connector" [dependencies.query-structure] path = "../../query-structure" +[dependencies.sql-query-builder] +path = "../../query-builders/sql-query-builder" + [dependencies.prisma-value] path = "../../../libs/prisma-value" diff --git a/query-engine/connectors/sql-query-connector/src/database/connection.rs b/query-engine/connectors/sql-query-connector/src/database/connection.rs index 614f174e562b..cc3489480f3c 100644 --- a/query-engine/connectors/sql-query-connector/src/database/connection.rs +++ b/query-engine/connectors/sql-query-connector/src/database/connection.rs @@ -1,19 +1,22 @@ #![cfg_attr(target_arch = "wasm32", allow(dead_code))] use super::{catch, transaction::SqlConnectorTransaction}; -use crate::{database::operations::*, Context, SqlError}; +use crate::{database::operations::*, SqlError}; use async_trait::async_trait; use connector::ConnectionLike; use connector_interface::{ - self as connector, AggregationRow, AggregationSelection, Connection, ReadOperations, RecordFilter, Transaction, - WriteArgs, WriteOperations, + self as connector, AggregationRow, Connection, ReadOperations, Transaction, WriteOperations, }; use prisma_value::PrismaValue; use quaint::{ connector::{IsolationLevel, TransactionCapable}, prelude::{ConnectionInfo, Queryable}, }; -use query_structure::{prelude::*, Filter, QueryArguments, RelationLoadStrategy, SelectionResult}; +use query_structure::{ + prelude::*, AggregationSelection, Filter, QueryArguments, RecordFilter, RelationLoadStrategy, SelectionResult, + WriteArgs, +}; +use sql_query_builder::Context; use std::{collections::HashMap, str::FromStr}; use telemetry::TraceParent; diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs 
b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 9eabee23ae72..9a9ac4469ce0 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -3,18 +3,13 @@ mod coerce; #[cfg(feature = "relation_joins")] mod process; -use crate::{ - column_metadata, - model_extensions::*, - query_arguments_ext::QueryArgumentsExt, - query_builder::{self, read}, - Context, QueryExt, Queryable, SqlError, -}; +use crate::{QueryExt, Queryable, SqlError}; use connector_interface::*; use futures::stream::{FuturesUnordered, StreamExt}; use quaint::ast::*; use query_structure::*; +use sql_query_builder::{column_metadata, read, AsColumns, AsTable, Context, QueryArgumentsExt, RelationFieldExt}; pub(crate) async fn get_single_record( conn: &dyn Queryable, @@ -53,7 +48,7 @@ async fn get_single_record_joins( &field_names, ); - let query = query_builder::select::SelectBuilder::build( + let query = sql_query_builder::select::SelectBuilder::build( QueryArguments::from((model.clone(), filter.clone())), &selected_fields, ctx, @@ -165,7 +160,7 @@ async fn get_many_records_joins( return Ok(records); }; - match ctx.max_bind_values { + match ctx.max_bind_values() { Some(chunk_size) if query_arguments.should_batch(chunk_size) => { return Err(SqlError::QueryParameterLimitExceeded( "Joined queries cannot be split into multiple queries.".to_string(), @@ -174,7 +169,7 @@ async fn get_many_records_joins( _ => (), }; - let query = query_builder::select::SelectBuilder::build(query_arguments.clone(), &selected_fields, ctx); + let query = sql_query_builder::select::SelectBuilder::build(query_arguments.clone(), &selected_fields, ctx); for item in conn.filter(query.into(), meta.as_slice(), ctx).await?.into_iter() { let mut record = Record::from(item); @@ -217,7 +212,7 @@ async fn get_many_records_wo_joins( // Todo: This can't work for all cases. 
Cursor-based pagination will not work, because it relies on the ordering // to determine the right queries to fire, and will default to incorrect orderings if no ordering is found. // The should_batch has been adjusted to reflect that as a band-aid, but deeper investigation is necessary. - match ctx.max_bind_values { + match ctx.max_bind_values() { Some(chunk_size) if query_arguments.should_batch(chunk_size) => { if query_arguments.has_unbatchable_ordering() { return Err(SqlError::QueryParameterLimitExceeded( @@ -312,7 +307,7 @@ pub(crate) async fn get_related_m2m_record_ids( // [DTODO] To verify: We might need chunked fetch here (too many parameters in the query). let select = Select::from_table(table) - .so_that(query_builder::in_conditions(&from_columns, from_record_ids, ctx)) + .so_that(sql_query_builder::in_conditions(&from_columns, from_record_ids, ctx)) .columns(from_columns.into_iter().chain(to_columns.into_iter())); let parent_model_id = from_field.model().primary_identifier(); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs index cfa03796ceaf..042dc2815b6a 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs @@ -2,8 +2,7 @@ use std::borrow::Cow; use itertools::{Either, Itertools}; use query_structure::{QueryArguments, Record}; - -use crate::query_arguments_ext::QueryArgumentsExt; +use sql_query_builder::QueryArgumentsExt; macro_rules! processor_state { ($name:ident $(-> $transition:ident($bound:ident))?) 
=> { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs index 9ea13127a4fc..3ba3327d02db 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs @@ -1,17 +1,12 @@ use super::read::get_single_record; -use crate::column_metadata::{self, ColumnMetadata}; -use crate::filter::FilterBuilder; -use crate::model_extensions::AsColumns; -use crate::query_builder::write::{build_update_and_set_query, chunk_update_with_ids}; use crate::row::ToSqlRow; -use crate::{Context, QueryExt, Queryable}; +use crate::{QueryExt, Queryable}; -use crate::limit::wrap_with_limit_subquery_if_needed; -use connector_interface::*; use itertools::Itertools; use quaint::ast::*; use query_structure::*; +use sql_query_builder::{column_metadata, limit, write, AsColumns, ColumnMetadata, Context, FilterBuilder}; /// Performs an update with an explicit selection set. /// This function is called for connectors that supports the `UpdateReturning` capability. 
@@ -34,7 +29,7 @@ pub(crate) async fn update_one_with_selection( let cond = FilterBuilder::without_top_level_joins().visit_filter(build_update_one_filter(record_filter), ctx); - let update = build_update_and_set_query(model, args, Some(&selected_fields), ctx).so_that(cond); + let update = write::build_update_and_set_query(model, args, Some(&selected_fields), ctx).so_that(cond); let field_names: Vec<_> = selected_fields.db_names().collect(); let idents = selected_fields.type_identifiers_with_arities(); @@ -107,8 +102,8 @@ pub(super) async fn update_many_from_filter( limit: Option, ctx: &Context<'_>, ) -> crate::Result> { - let update = build_update_and_set_query(model, args, None, ctx); - let filter_condition = wrap_with_limit_subquery_if_needed( + let update = write::build_update_and_set_query(model, args, None, ctx); + let filter_condition = limit::wrap_with_limit_subquery_if_needed( model, FilterBuilder::without_top_level_joins().visit_filter(record_filter.filter, ctx), limit, @@ -144,10 +139,10 @@ pub(super) async fn update_many_from_ids_and_filter( } let updates = { - let update = build_update_and_set_query(model, args, selected_fields, ctx); + let update = write::build_update_and_set_query(model, args, selected_fields, ctx); let ids: Vec<&SelectionResult> = ids.iter().take(limit.unwrap_or(usize::MAX)).collect(); - chunk_update_with_ids(update, model, &ids, filter_condition, ctx)? 
+ write::chunk_update_with_ids(update, model, &ids, filter_condition, ctx) }; Ok((updates, ids)) diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs index f086e4c60798..e95175e33b1a 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs @@ -1,14 +1,8 @@ -use crate::{ - column_metadata, - filter::FilterBuilder, - model_extensions::AsColumns, - query_builder::write::{build_update_and_set_query, create_record}, - row::ToSqlRow, - Context, Queryable, -}; +use crate::{row::ToSqlRow, Queryable}; use connector_interface::NativeUpsert; use quaint::prelude::{OnConflict, Query}; use query_structure::{ModelProjection, Record, SingleRecord}; +use sql_query_builder::{column_metadata, write, AsColumns, Context, FilterBuilder}; pub(crate) async fn native_upsert( conn: &dyn Queryable, @@ -23,9 +17,9 @@ pub(crate) async fn native_upsert( let where_condition = FilterBuilder::without_top_level_joins().visit_filter(upsert.filter().clone(), ctx); let update = - build_update_and_set_query(upsert.model(), upsert.update().clone(), None, ctx).so_that(where_condition); + write::build_update_and_set_query(upsert.model(), upsert.update().clone(), None, ctx).so_that(where_condition); - let insert = create_record(upsert.model(), upsert.create().clone(), &selected_fields, ctx); + let insert = write::create_record(upsert.model(), upsert.create().clone(), &selected_fields, ctx); let constraints: Vec<_> = upsert.unique_constraints().as_columns(ctx).collect(); let query: Query = insert.on_conflict(OnConflict::Update(update, constraints)).into(); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index 3df576c3fe03..07a385bab3ce 100644 --- 
a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -1,19 +1,16 @@ use super::update::*; -use crate::column_metadata; -use crate::filter::FilterBuilder; use crate::row::ToSqlRow; -use crate::{ - error::SqlError, model_extensions::*, query_builder::write, sql_trace::SqlTraceComment, Context, QueryExt, - Queryable, -}; -use connector_interface::*; +use crate::value::to_prisma_value; +use crate::{error::SqlError, QueryExt, Queryable}; use itertools::Itertools; use quaint::ast::{Insert, Query}; +use quaint::prelude::ResultSet; use quaint::{ error::ErrorKind, prelude::{native_uuid, uuid_to_bin, uuid_to_bin_swapped, Aliasable, Select, SqlFamily}, }; use query_structure::*; +use sql_query_builder::{column_metadata, write, Context, FilterBuilder, SelectionResultExt, SqlTraceComment}; use std::borrow::Cow; use std::{ collections::{HashMap, HashSet}, @@ -72,7 +69,7 @@ async fn generate_id( // db generate values only if needed if need_select { - let pk_select = id_select.add_traceparent(ctx.traceparent); + let pk_select = id_select.add_traceparent(ctx.traceparent()); let pk_result = conn.query(pk_select.into()).await?; let result = try_convert(&(id_field.into()), pk_result)?; @@ -287,7 +284,7 @@ pub(crate) async fn create_records_returning( /// Partitions data into batches, respecting `max_bind_values` and `max_insert_rows` settings from /// the `Context`. fn partition_into_batches(args: Vec, ctx: &Context<'_>) -> Vec> { - let batches = if let Some(max_params) = ctx.max_bind_values { + let batches = if let Some(max_params) = ctx.max_bind_values() { // We need to split inserts if they are above a parameter threshold, as well as split based on number of rows. // -> Horizontal partitioning by row number, vertical by number of args. 
args.into_iter() @@ -328,7 +325,7 @@ fn partition_into_batches(args: Vec, ctx: &Context<'_>) -> Vec) -> crate::Result { Ok(conn.raw_json(inputs).await?) } + +fn try_convert(model_projection: &ModelProjection, result_set: ResultSet) -> crate::Result { + let columns: Vec = result_set.columns().iter().map(|c| c.to_string()).collect(); + let mut record_projection = SelectionResult::default(); + + if let Some(row) = result_set.into_iter().next() { + for (i, val) in row.into_iter().enumerate() { + match model_projection.map_db_name(columns[i].as_str()) { + Some(field) => { + record_projection.add((field, to_prisma_value(val)?)); + } + None => { + return Err(SqlError::DomainError(DomainError::ScalarFieldNotFound { + name: columns[i].clone(), + container_type: "model", + container_name: String::from("unspecified"), + })) + } + } + } + } + + if model_projection.scalar_length() == record_projection.len() { + Ok(record_projection) + } else { + Err(SqlError::DomainError(DomainError::ConversionFailure( + "ResultSet".to_owned(), + "RecordProjection".to_owned(), + ))) + } +} diff --git a/query-engine/connectors/sql-query-connector/src/database/transaction.rs b/query-engine/connectors/sql-query-connector/src/database/transaction.rs index 6528343f54fa..9d43dfb25cbf 100644 --- a/query-engine/connectors/sql-query-connector/src/database/transaction.rs +++ b/query-engine/connectors/sql-query-connector/src/database/transaction.rs @@ -1,14 +1,15 @@ use super::catch; -use crate::{database::operations::*, Context, SqlError}; +use crate::{database::operations::*, SqlError}; use async_trait::async_trait; use connector::ConnectionLike; -use connector_interface::{ - self as connector, AggregationRow, AggregationSelection, ReadOperations, RecordFilter, Transaction, WriteArgs, - WriteOperations, -}; +use connector_interface::{self as connector, AggregationRow, ReadOperations, Transaction, WriteOperations}; use prisma_value::PrismaValue; use quaint::prelude::ConnectionInfo; -use 
query_structure::{prelude::*, Filter, QueryArguments, RelationLoadStrategy, SelectionResult}; +use query_structure::{ + prelude::*, AggregationSelection, Filter, QueryArguments, RecordFilter, RelationLoadStrategy, SelectionResult, + WriteArgs, +}; +use sql_query_builder::Context; use std::collections::HashMap; use telemetry::TraceParent; diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index dc809c7bf627..28ec5862e227 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -1,26 +1,14 @@ #![allow(clippy::wrong_self_convention)] #![deny(unsafe_code)] -mod column_metadata; -pub mod context; -mod cursor_condition; mod database; mod error; -mod filter; -mod join_utils; -mod limit; -pub mod model_extensions; -mod nested_aggregations; -mod ordering; -pub mod query_arguments_ext; -pub mod query_builder; mod query_ext; mod row; mod ser_raw; -mod sql_trace; mod value; -use self::{column_metadata::*, context::Context, query_ext::QueryExt, row::*}; +use self::{query_ext::QueryExt, row::*}; use quaint::prelude::Queryable; pub use database::operations::write::generate_insert_statements; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs deleted file mode 100644 index d1bff1954100..000000000000 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -mod column; -mod record; -mod relation; -mod scalar_field; -mod selection_result; -mod table; - -pub use self::{column::*, record::*, scalar_field::*}; -pub(crate) use self::{relation::*, selection_result::*, table::*}; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs deleted file mode 100644 index 
e764aa8e58ba..000000000000 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::{value::to_prisma_value, SqlError}; -use quaint::connector::ResultSet; -use query_structure::{DomainError, ModelProjection, SelectionResult}; - -pub fn try_convert(model_projection: &ModelProjection, result_set: ResultSet) -> crate::Result { - let columns: Vec = result_set.columns().iter().map(|c| c.to_string()).collect(); - let mut record_projection = SelectionResult::default(); - - if let Some(row) = result_set.into_iter().next() { - for (i, val) in row.into_iter().enumerate() { - match model_projection.map_db_name(columns[i].as_str()) { - Some(field) => { - record_projection.add((field, to_prisma_value(val)?)); - } - None => { - return Err(SqlError::DomainError(DomainError::ScalarFieldNotFound { - name: columns[i].clone(), - container_type: "model", - container_name: String::from("unspecified"), - })) - } - } - } - } - - if model_projection.scalar_length() == record_projection.len() { - Ok(record_projection) - } else { - Err(SqlError::DomainError(DomainError::ConversionFailure( - "ResultSet".to_owned(), - "RecordProjection".to_owned(), - ))) - } -} diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index dcf5b2f143b2..842ee100363c 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -1,15 +1,12 @@ -use crate::filter::FilterBuilder; use crate::ser_raw::SerializedResultSet; -use crate::{ - column_metadata, error::*, model_extensions::*, sql_trace::SqlTraceComment, ColumnMetadata, Context, SqlRow, - ToSqlRow, -}; +use crate::{error::*, SqlRow, ToSqlRow}; use async_trait::async_trait; -use connector_interface::RecordFilter; +use chrono::Utc; use futures::future::FutureExt; use itertools::Itertools; use quaint::{ast::*, connector::Queryable}; 
use query_structure::*; +use sql_query_builder::{column_metadata, AsColumns, AsTable, ColumnMetadata, Context, FilterBuilder, SqlTraceComment}; use std::{collections::HashMap, panic::AssertUnwindSafe}; use tracing::info_span; use tracing_futures::Instrument; @@ -25,7 +22,7 @@ impl QueryExt for Q { let span = info_span!("prisma:engine:filter_read_query"); let q = match q { - Query::Select(x) => Query::Select(Box::from(x.add_traceparent(ctx.traceparent))), + Query::Select(x) => Query::Select(Box::from(x.add_traceparent(ctx.traceparent()))), q => q, }; @@ -107,7 +104,7 @@ impl QueryExt for Q { let select = Select::from_table(model.as_table(ctx)) .columns(id_cols) - .add_traceparent(ctx.traceparent) + .add_traceparent(ctx.traceparent()) .so_that(condition); self.select_ids(select, model_id, ctx).await @@ -196,3 +193,40 @@ pub(crate) trait QueryExt { ctx: &Context<'_>, ) -> crate::Result>; } + +/// Attempts to convert a PrismaValue to a database value without any additional type information. +/// Can't reliably map Null values. +fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { + match pv { + PrismaValue::String(s) => s.into(), + PrismaValue::Float(f) => f.into(), + PrismaValue::Boolean(b) => b.into(), + PrismaValue::DateTime(d) => d.with_timezone(&Utc).into(), + PrismaValue::Enum(e) => e.into(), + PrismaValue::Int(i) => i.into(), + PrismaValue::BigInt(i) => i.into(), + PrismaValue::Uuid(u) => u.to_string().into(), + PrismaValue::List(l) => Value::array(l.into_iter().map(convert_lossy)), + PrismaValue::Json(s) => Value::json(serde_json::from_str(&s).unwrap()), + PrismaValue::Bytes(b) => Value::bytes(b), + PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. 
+ PrismaValue::Object(_) => unimplemented!(), + PrismaValue::Placeholder { name, r#type } => Value::var(name, convert_placeholder_type_to_var_type(&r#type)), + } +} + +fn convert_placeholder_type_to_var_type(pt: &PlaceholderType) -> VarType { + match pt { + PlaceholderType::Any => VarType::Unknown, + PlaceholderType::String => VarType::Text, + PlaceholderType::Int => VarType::Int32, + PlaceholderType::BigInt => VarType::Int64, + PlaceholderType::Float => VarType::Numeric, + PlaceholderType::Boolean => VarType::Boolean, + PlaceholderType::Decimal => VarType::Numeric, + PlaceholderType::Date => VarType::DateTime, + PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(t))), + PlaceholderType::Object => VarType::Json, + PlaceholderType::Bytes => VarType::Bytes, + } +} diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 59947bbf386b..bb18ae623a36 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -1,10 +1,11 @@ -use crate::{column_metadata::ColumnMetadata, error::SqlError, value::to_prisma_value}; +use crate::{error::SqlError, value::to_prisma_value}; use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; -use connector_interface::{coerce_null_to_zero_value, AggregationResult, AggregationSelection}; +use connector_interface::{coerce_null_to_zero_value, AggregationResult}; use core::{f32, f64}; use quaint::{connector::ResultRow, Value, ValueType}; -use query_structure::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; +use query_structure::{AggregationSelection, ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; +use sql_query_builder::ColumnMetadata; use std::{io, str::FromStr}; use uuid::Uuid; diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index 
a6aa37e1f63a..cd41c4ccf840 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -20,6 +20,8 @@ indexmap.workspace = true itertools.workspace = true once_cell = "1" petgraph = "0.4" +query-builder = { path = "../query-builders/query-builder" } +sql-query-builder = { path = "../query-builders/sql-query-builder" } query-structure = { path = "../query-structure", features = [ "default_generators", ] } diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 1a3a3606bb68..4ba6f22a46b0 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -1,4 +1,4 @@ -use query_structure::PrismaValue; +use query_builder::DbQuery; use serde::Serialize; mod format; @@ -21,18 +21,6 @@ impl std::fmt::Display for Binding { } } -#[derive(Debug, Serialize)] -pub struct DbQuery { - pub query: String, - pub params: Vec, -} - -impl DbQuery { - pub fn new(query: String, params: Vec) -> Self { - Self { query, params } - } -} - #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct JoinExpression { diff --git a/query-engine/core/src/compiler/expression/format.rs b/query-engine/core/src/compiler/expression/format.rs index 8c61747356df..cd2128c3a0b0 100644 --- a/query-engine/core/src/compiler/expression/format.rs +++ b/query-engine/core/src/compiler/expression/format.rs @@ -41,19 +41,19 @@ where pub fn expression(&'a self, expression: &'a Expression) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { match expression { - Expression::Seq(vec) => self.seq(&vec), - Expression::Get { name } => self.get(&name), - Expression::Let { bindings, expr } => self.r#let(&bindings, &expr), - Expression::GetFirstNonEmpty { names } => self.get_first_non_empty(&names), - Expression::Query(db_query) => self.query("query", &db_query), - Expression::Execute(db_query) => self.query("execute", &db_query), - Expression::Reverse(expression) => 
self.unary_function("reverse", &expression), - Expression::Sum(vec) => self.function("sum", &vec), - Expression::Concat(vec) => self.function("concat", &vec), + Expression::Seq(vec) => self.seq(vec), + Expression::Get { name } => self.get(name), + Expression::Let { bindings, expr } => self.r#let(bindings, expr), + Expression::GetFirstNonEmpty { names } => self.get_first_non_empty(names), + Expression::Query(db_query) => self.query("query", db_query), + Expression::Execute(db_query) => self.query("execute", db_query), + Expression::Reverse(expression) => self.unary_function("reverse", expression), + Expression::Sum(vec) => self.function("sum", vec), + Expression::Concat(vec) => self.function("concat", vec), Expression::Unique(expression) => self.unary_function("unique", expression), Expression::Required(expression) => self.unary_function("required", expression), - Expression::Join { parent, children } => self.join(&parent, &children), - Expression::MapField { field, records } => self.map_field(&field, &records), + Expression::Join { parent, children } => self.join(parent, children), + Expression::MapField { field, records } => self.map_field(field, records), } } @@ -116,7 +116,7 @@ where }) .parens(), ), - PrismaValue::List(values) => self.list(&values), + PrismaValue::List(values) => self.list(values), _ => self .keyword("const") .append(self.text(format!("{value:?}")).annotate(color_lit()).parens()), diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index f3ff82c95298..a54c0fe1cea5 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -6,14 +6,12 @@ use quaint::{ prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, visitor::Visitor, }; +use query_builder::DbQuery; use read::translate_read_query; -use sql_query_connector::context::Context; +use sql_query_builder::Context; use write::translate_write_query; -use crate::{ - 
compiler::expression::{DbQuery, Expression}, - Query, -}; +use crate::{compiler::expression::Expression, Query}; use super::TranslateResult; diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 785d6d13c0ff..ab540d024722 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -1,13 +1,5 @@ use std::collections::HashSet; -use itertools::Itertools; -use query_structure::{ - ConditionValue, Filter, ModelProjection, PrismaValue, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, -}; -use sql_query_connector::{ - context::Context, model_extensions::AsColumns, query_arguments_ext::QueryArgumentsExt, query_builder, -}; - use crate::{ compiler::{ expression::{Binding, Expression, JoinExpression}, @@ -15,6 +7,11 @@ use crate::{ }, FilteredQuery, ReadQuery, RelatedRecordsQuery, }; +use itertools::Itertools; +use query_structure::{ + ConditionValue, Filter, ModelProjection, PrismaValue, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, +}; +use sql_query_builder::{read, AsColumns, Context, QueryArgumentsExt}; use super::build_db_query; @@ -23,7 +20,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ReadQuery::RecordQuery(rq) => { let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); - let query = query_builder::read::get_records( + let query = read::get_records( &rq.model, ModelProjection::from(&selected_fields) .as_columns(ctx) @@ -49,7 +46,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans let needs_reversed_order = mrq.args.needs_reversed_order(); // TODO: we ignore chunking for now - let query = query_builder::read::get_records( + let query = read::get_records( &mrq.model, ModelProjection::from(&selected_fields) .as_columns(ctx) @@ -177,7 +174,7 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: 
&Context<'_>) -> Transl let to_one_relation = !rrq.parent_field.arity().is_list(); // TODO: we ignore chunking for now - let query = query_builder::read::get_records( + let query = read::get_records( &rrq.parent_field.related_model(), ModelProjection::from(&selected_fields) .as_columns(ctx) diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-engine/core/src/compiler/translate/query/write.rs index b3ae28a7b76e..2dbbf12327c7 100644 --- a/query-engine/core/src/compiler/translate/query/write.rs +++ b/query-engine/core/src/compiler/translate/query/write.rs @@ -1,5 +1,6 @@ use query_structure::ModelProjection; -use sql_query_connector::{context::Context, generate_insert_statements, query_builder}; +use sql_query_builder::{write, Context}; +use sql_query_connector::generate_insert_statements; use crate::{ compiler::{expression::Expression, translate::TranslateResult}, @@ -13,12 +14,7 @@ pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> Tra WriteQuery::CreateRecord(cr) => { // TODO: MySQL needs additional logic to generate IDs on our side. 
// See sql_query_connector::database::operations::write::create_record - let query = query_builder::write::create_record( - &cr.model, - cr.args, - &ModelProjection::from(&cr.selected_fields), - ctx, - ); + let query = write::create_record(&cr.model, cr.args, &ModelProjection::from(&cr.selected_fields), ctx); // TODO: we probably need some additional node type or extra info in the WriteQuery node // to help the client executor figure out the returned ID in the case when it's inferred diff --git a/query-engine/core/src/interpreter/query_interpreters/write.rs b/query-engine/core/src/interpreter/query_interpreters/write.rs index 3dcb992b4356..f6267bf0488c 100644 --- a/query-engine/core/src/interpreter/query_interpreters/write.rs +++ b/query-engine/core/src/interpreter/query_interpreters/write.rs @@ -5,8 +5,8 @@ use crate::{ query_ast::*, QueryResult, RecordSelection, }; -use connector::{ConnectionLike, DatasourceFieldName, NativeUpsert, WriteArgs}; -use query_structure::{ManyRecords, Model, RawJson}; +use connector::{ConnectionLike, NativeUpsert}; +use query_structure::{DatasourceFieldName, ManyRecords, Model, RawJson, WriteArgs}; use telemetry::TraceParent; pub(crate) async fn execute( diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index 2326183b7b54..09a5af04dc79 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -1,9 +1,8 @@ //! 
Prisma read query AST use super::FilteredQuery; use crate::ToGraphviz; -use connector::AggregationSelection; use enumflags2::BitFlags; -use query_structure::{prelude::*, Filter, QueryArguments, RelationLoadStrategy}; +use query_structure::{prelude::*, AggregationSelection, Filter, QueryArguments, RelationLoadStrategy}; use std::fmt::Display; #[allow(clippy::enum_variant_names)] diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index ca0287179e32..b538e2675e32 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -1,8 +1,8 @@ //! Write query AST use super::{FilteredNestedMutation, FilteredQuery}; use crate::{ReadQuery, RecordQuery, ToGraphviz}; -use connector::{DatasourceFieldName, NativeUpsert, RecordFilter, WriteArgs}; -use query_structure::{prelude::*, Filter}; +use connector::NativeUpsert; +use query_structure::{prelude::*, DatasourceFieldName, Filter, RecordFilter, WriteArgs}; use std::collections::HashMap; #[derive(Debug, Clone)] diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs index 94e8b1bcbdc9..685d9c4e1e23 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs @@ -6,9 +6,8 @@ pub(crate) use group_by::*; use super::*; use crate::FieldPair; -use connector::AggregationSelection; use itertools::Itertools; -use query_structure::{Model, ScalarFieldRef}; +use query_structure::{AggregationSelection, Model, ScalarFieldRef}; use schema::constants::aggregations::*; /// Resolves the given field as a aggregation query. 
diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 86360291ead5..9dc0c0821465 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -4,9 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ArgumentListLookup, ParsedField, ParsedInputList, ParsedInputMap, }; -use connector::WriteArgs; use psl::{datamodel_connector::ConnectorCapability, parser_database::RelationFieldId}; -use query_structure::{IntoFilter, Model, Zipper}; +use query_structure::{IntoFilter, Model, WriteArgs, Zipper}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; use write_args_parser::*; diff --git a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs index 5044451193f3..c3de8071d804 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, }; -use connector::RecordFilter; -use query_structure::{Filter, Model, PrismaValue, RelationFieldRef}; +use query_structure::{Filter, Model, PrismaValue, RecordFilter, RelationFieldRef}; use std::convert::TryInto; /// Adds a delete (single) record node to the graph and connects it to the parent. 
diff --git a/query-engine/core/src/query_graph_builder/write/utils.rs b/query-engine/core/src/query_graph_builder/write/utils.rs index b5db88b2240d..a882c5a2d312 100644 --- a/query-engine/core/src/query_graph_builder/write/utils.rs +++ b/query-engine/core/src/query_graph_builder/write/utils.rs @@ -3,10 +3,12 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, Computation, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{DatasourceFieldName, RecordFilter, WriteArgs, WriteOperation}; use indexmap::IndexMap; use psl::parser_database::ReferentialAction; -use query_structure::{FieldSelection, Filter, Model, PrismaValue, RelationFieldRef, SelectionResult}; +use query_structure::{ + DatasourceFieldName, FieldSelection, Filter, Model, PrismaValue, RecordFilter, RelationFieldRef, SelectionResult, + WriteArgs, WriteOperation, +}; use schema::QuerySchema; /// Coerces single values (`ParsedInputValue::Single` and `ParsedInputValue::Map`) into a vector. 
diff --git a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs index 5e5cc464fa51..0b79f1a0e7ca 100644 --- a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs +++ b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs @@ -1,7 +1,9 @@ use super::*; use crate::query_document::{ParsedInputMap, ParsedInputValue}; -use connector::{DatasourceFieldName, WriteArgs, WriteOperation}; -use query_structure::{CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, TypeIdentifier}; +use query_structure::{ + CompositeFieldRef, DatasourceFieldName, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, + TypeIdentifier, WriteArgs, WriteOperation, +}; use schema::constants::{args, json_null, operations}; use std::{borrow::Cow, convert::TryInto}; diff --git a/query-engine/query-builders/query-builder/Cargo.toml b/query-engine/query-builders/query-builder/Cargo.toml new file mode 100644 index 000000000000..4c35b489e828 --- /dev/null +++ b/query-engine/query-builders/query-builder/Cargo.toml @@ -0,0 +1,9 @@ +[package] +edition = "2021" +name = "query-builder" +version = "0.1.0" + +[dependencies] +serde.workspace = true + +query-structure = { path = "../../query-structure" } diff --git a/query-engine/query-builders/query-builder/src/lib.rs b/query-engine/query-builders/query-builder/src/lib.rs new file mode 100644 index 000000000000..240ca848580c --- /dev/null +++ b/query-engine/query-builders/query-builder/src/lib.rs @@ -0,0 +1,14 @@ +use query_structure::PrismaValue; +use serde::Serialize; + +#[derive(Debug, Serialize)] +pub struct DbQuery { + pub query: String, + pub params: Vec, +} + +impl DbQuery { + pub fn new(query: String, params: Vec) -> Self { + Self { query, params } + } +} diff --git a/query-engine/query-builders/sql-query-builder/Cargo.toml b/query-engine/query-builders/sql-query-builder/Cargo.toml new file mode 100644 
index 000000000000..80cccff5f961 --- /dev/null +++ b/query-engine/query-builders/sql-query-builder/Cargo.toml @@ -0,0 +1,19 @@ +[package] +edition = "2021" +name = "sql-query-builder" +version = "0.1.0" + +[dependencies] +quaint = { path = "../../../quaint" } +query-structure = { path = "../../query-structure" } +query-builder = { path = "../query-builder" } +telemetry = { path = "../../../libs/telemetry" } +prisma-value = { path = "../../../libs/prisma-value" } +psl = { path = "../../../psl/psl" } + +itertools.workspace = true +chrono.workspace = true +serde_json.workspace = true + +[features] +relation_joins = [] diff --git a/query-engine/connectors/sql-query-connector/src/column_metadata.rs b/query-engine/query-builders/sql-query-builder/src/column_metadata.rs similarity index 84% rename from query-engine/connectors/sql-query-connector/src/column_metadata.rs rename to query-engine/query-builders/sql-query-builder/src/column_metadata.rs index c64871b7eb22..0e3ab88df37e 100644 --- a/query-engine/connectors/sql-query-connector/src/column_metadata.rs +++ b/query-engine/query-builders/sql-query-builder/src/column_metadata.rs @@ -2,7 +2,7 @@ use query_structure::{FieldArity, TypeIdentifier}; /// Helps dealing with column value conversion and possible error resolution. #[derive(Clone, Debug, Copy)] -pub(crate) struct ColumnMetadata<'a> { +pub struct ColumnMetadata<'a> { identifier: &'a TypeIdentifier, name: Option<&'a str>, arity: FieldArity, @@ -41,7 +41,7 @@ impl<'a> ColumnMetadata<'a> { /// Create a set of metadata objects, combining column names and type /// information. -pub(crate) fn create<'a, T>(field_names: &'a [T], idents: &'a [(TypeIdentifier, FieldArity)]) -> Vec> +pub fn create<'a, T>(field_names: &'a [T], idents: &'a [(TypeIdentifier, FieldArity)]) -> Vec> where T: AsRef, { @@ -55,7 +55,7 @@ where } /// Create a set of metadata objects. 
-pub(crate) fn create_anonymous(idents: &[(TypeIdentifier, FieldArity)]) -> Vec> { +pub fn create_anonymous(idents: &[(TypeIdentifier, FieldArity)]) -> Vec> { idents .iter() .map(|(identifier, arity)| ColumnMetadata::new(identifier, *arity)) diff --git a/query-engine/connectors/sql-query-connector/src/context.rs b/query-engine/query-builders/sql-query-builder/src/context.rs similarity index 78% rename from query-engine/connectors/sql-query-connector/src/context.rs rename to query-engine/query-builders/sql-query-builder/src/context.rs index b3e28c8152c1..6bb1f2a1414a 100644 --- a/query-engine/connectors/sql-query-connector/src/context.rs +++ b/query-engine/query-builders/sql-query-builder/src/context.rs @@ -25,7 +25,19 @@ impl<'a> Context<'a> { } } + pub fn traceparent(&self) -> Option { + self.traceparent + } + pub(crate) fn schema_name(&self) -> &str { self.connection_info.schema_name() } + + pub fn max_insert_rows(&self) -> Option { + self.max_insert_rows + } + + pub fn max_bind_values(&self) -> Option { + self.max_bind_values + } } diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/query-builders/sql-query-builder/src/cursor_condition.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/cursor_condition.rs rename to query-engine/query-builders/sql-query-builder/src/cursor_condition.rs diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/query-builders/sql-query-builder/src/filter/alias.rs similarity index 95% rename from query-engine/connectors/sql-query-connector/src/filter/alias.rs rename to query-engine/query-builders/sql-query-builder/src/filter/alias.rs index 10fc31080aae..5487cb108ab0 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/alias.rs +++ b/query-engine/query-builders/sql-query-builder/src/filter/alias.rs @@ -43,7 +43,7 @@ impl Alias { /// A string representation of the current alias. 
The current mode can be /// overridden by defining the `mode_override`. - pub fn to_string(&self, mode_override: Option) -> String { + pub fn to_string(self, mode_override: Option) -> String { match mode_override.unwrap_or(self.mode) { AliasMode::Table => format!("t{}", self.counter), AliasMode::Join => format!("j{}", self.counter), @@ -51,7 +51,7 @@ impl Alias { } #[cfg(feature = "relation_joins")] - pub fn to_table_string(&self) -> String { + pub fn to_table_string(self) -> String { self.to_string(Some(AliasMode::Table)) } } diff --git a/query-engine/connectors/sql-query-connector/src/filter/mod.rs b/query-engine/query-builders/sql-query-builder/src/filter/mod.rs similarity index 79% rename from query-engine/connectors/sql-query-connector/src/filter/mod.rs rename to query-engine/query-builders/sql-query-builder/src/filter/mod.rs index 573024845b45..7cbe091a816a 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/mod.rs +++ b/query-engine/query-builders/sql-query-builder/src/filter/mod.rs @@ -7,16 +7,16 @@ pub use visitor::*; use crate::{context::Context, join_utils::AliasedJoin}; -pub(crate) struct FilterBuilder {} +pub struct FilterBuilder {} pub(crate) struct FilterBuilderWithJoins {} -pub(crate) struct FilterBuilderWithoutJoins {} +pub struct FilterBuilderWithoutJoins {} impl FilterBuilder { pub(crate) fn with_top_level_joins() -> FilterBuilderWithJoins { FilterBuilderWithJoins {} } - pub(crate) fn without_top_level_joins() -> FilterBuilderWithoutJoins { + pub fn without_top_level_joins() -> FilterBuilderWithoutJoins { FilterBuilderWithoutJoins {} } } @@ -34,7 +34,7 @@ impl FilterBuilderWithJoins { impl FilterBuilderWithoutJoins { /// Visits a filter without any top-level joins. Can be safely used in any context. 
- pub(crate) fn visit_filter(&self, filter: Filter, ctx: &Context) -> ConditionTree<'static> { + pub fn visit_filter(&self, filter: Filter, ctx: &Context) -> ConditionTree<'static> { let (cond, _) = FilterVisitor::without_top_level_joins().visit_filter(filter, ctx); cond diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/query-builders/sql-query-builder/src/filter/visitor.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/filter/visitor.rs rename to query-engine/query-builders/sql-query-builder/src/filter/visitor.rs diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/query-builders/sql-query-builder/src/join_utils.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/join_utils.rs rename to query-engine/query-builders/sql-query-builder/src/join_utils.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/query-builders/sql-query-builder/src/lib.rs similarity index 65% rename from query-engine/connectors/sql-query-connector/src/query_builder/mod.rs rename to query-engine/query-builders/sql-query-builder/src/lib.rs index 15d696b4e7ea..dafabf1f3772 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ b/query-engine/query-builders/sql-query-builder/src/lib.rs @@ -1,16 +1,32 @@ +pub mod column_metadata; +mod context; +mod cursor_condition; +mod filter; +mod join_utils; +pub mod limit; +mod model_extensions; +mod nested_aggregations; +mod ordering; +mod query_arguments_ext; pub mod read; #[cfg(feature = "relation_joins")] pub mod select; +mod sql_trace; pub mod write; -use crate::context::Context; -use crate::model_extensions::SelectionResultExt; use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values}; use query_structure::SelectionResult; +pub use column_metadata::ColumnMetadata; +pub use context::Context; +pub use 
filter::FilterBuilder; +pub use model_extensions::{AsColumn, AsColumns, AsTable, RelationFieldExt, SelectionResultExt}; +pub use query_arguments_ext::QueryArgumentsExt; +pub use sql_trace::SqlTraceComment; + const PARAMETER_LIMIT: usize = 2000; -pub(super) fn chunked_conditions( +pub fn chunked_conditions( columns: &[Column<'static>], records: &[&SelectionResult], ctx: &Context<'_>, @@ -29,7 +45,7 @@ where .collect() } -pub(super) fn in_conditions<'a>( +pub fn in_conditions<'a>( columns: &'a [Column<'static>], results: impl IntoIterator, ctx: &Context<'_>, diff --git a/query-engine/query-builders/sql-query-builder/src/limit.rs b/query-engine/query-builders/sql-query-builder/src/limit.rs new file mode 100644 index 000000000000..1e392fea529c --- /dev/null +++ b/query-engine/query-builders/sql-query-builder/src/limit.rs @@ -0,0 +1,31 @@ +use crate::{model_extensions::*, Context}; +use quaint::ast::*; +use query_structure::*; + +pub fn wrap_with_limit_subquery_if_needed<'a>( + model: &Model, + filter_condition: ConditionTree<'a>, + limit: Option, + ctx: &Context, +) -> ConditionTree<'a> { + if let Some(limit) = limit { + let columns = model + .primary_identifier() + .as_scalar_fields() + .expect("primary identifier must contain scalar fields") + .into_iter() + .map(|f| f.as_column(ctx)) + .collect::>(); + + ConditionTree::from( + Row::from(columns.clone()).in_selection( + Select::from_table(model.as_table(ctx)) + .columns(columns) + .so_that(filter_condition) + .limit(limit), + ), + ) + } else { + filter_condition + } +} diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/column.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/model_extensions/column.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/column.rs diff --git a/query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs 
b/query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs new file mode 100644 index 000000000000..4a92c88d4b85 --- /dev/null +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs @@ -0,0 +1,8 @@ +mod column; +mod relation; +mod scalar_field; +mod selection_result; +mod table; + +pub use self::{column::*, relation::*, selection_result::*, table::*}; +pub(crate) use scalar_field::*; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/relation.rs similarity index 99% rename from query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/relation.rs index 981390536807..49518c90f4c5 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/relation.rs @@ -5,7 +5,7 @@ use crate::{ use quaint::{ast::Table, prelude::Column}; use query_structure::{walkers, ModelProjection, Relation, RelationField}; -pub(crate) trait RelationFieldExt { +pub trait RelationFieldExt { fn m2m_columns(&self, ctx: &Context<'_>) -> Vec>; fn join_columns(&self, ctx: &Context<'_>) -> ColumnIterator; fn identifier_columns(&self, ctx: &Context<'_>) -> ColumnIterator; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/scalar_field.rs similarity index 78% rename from query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/scalar_field.rs index a3e88aa1d403..b94dc6be698f 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/scalar_field.rs 
@@ -1,6 +1,6 @@ use crate::context::Context; use chrono::Utc; -use prisma_value::{PlaceholderType, PrismaValue}; +use prisma_value::PrismaValue; use quaint::{ ast::{EnumName, Value, ValueType, VarType}, prelude::{EnumVariant, TypeDataLength, TypeFamily}, @@ -123,43 +123,6 @@ impl ScalarFieldExt for ScalarField { } } -/// Attempts to convert a PrismaValue to a database value without any additional type information. -/// Can't reliably map Null values. -pub fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { - match pv { - PrismaValue::String(s) => s.into(), - PrismaValue::Float(f) => f.into(), - PrismaValue::Boolean(b) => b.into(), - PrismaValue::DateTime(d) => d.with_timezone(&Utc).into(), - PrismaValue::Enum(e) => e.into(), - PrismaValue::Int(i) => i.into(), - PrismaValue::BigInt(i) => i.into(), - PrismaValue::Uuid(u) => u.to_string().into(), - PrismaValue::List(l) => Value::array(l.into_iter().map(convert_lossy)), - PrismaValue::Json(s) => Value::json(serde_json::from_str(&s).unwrap()), - PrismaValue::Bytes(b) => Value::bytes(b), - PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. 
- PrismaValue::Object(_) => unimplemented!(), - PrismaValue::Placeholder { name, r#type } => Value::var(name, convert_placeholder_type_to_var_type(&r#type)), - } -} - -fn convert_placeholder_type_to_var_type(pt: &PlaceholderType) -> VarType { - match pt { - PlaceholderType::Any => VarType::Unknown, - PlaceholderType::String => VarType::Text, - PlaceholderType::Int => VarType::Int32, - PlaceholderType::BigInt => VarType::Int64, - PlaceholderType::Float => VarType::Numeric, - PlaceholderType::Boolean => VarType::Boolean, - PlaceholderType::Decimal => VarType::Numeric, - PlaceholderType::Date => VarType::DateTime, - PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(t))), - PlaceholderType::Object => VarType::Json, - PlaceholderType::Bytes => VarType::Bytes, - } -} - fn parse_scalar_length(sf: &ScalarField) -> Option { sf.native_type() .and_then(|nt| nt.args().into_iter().next()) diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/selection_result.rs similarity index 97% rename from query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/selection_result.rs index 21d6aac3dbe2..4031f7f169a3 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/selection_result.rs @@ -3,7 +3,7 @@ use crate::context::Context; use quaint::Value; use query_structure::{PrismaValue, SelectedField, SelectionResult}; -pub(crate) trait SelectionResultExt { +pub trait SelectionResultExt { fn misses_autogen_value(&self) -> bool; fn db_values<'a>(&self, ctx: &Context<'_>) -> Vec>; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs 
b/query-engine/query-builders/sql-query-builder/src/model_extensions/table.rs similarity index 98% rename from query-engine/connectors/sql-query-connector/src/model_extensions/table.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/table.rs index ead15c34658e..5d8275e38b84 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/table.rs @@ -12,7 +12,7 @@ pub(crate) fn db_name_with_schema(model: &Model, ctx: &Context<'_>) -> Table<'st (schema_prefix, model_db_name).into() } -pub(crate) trait AsTable { +pub trait AsTable { fn as_table(&self, ctx: &Context<'_>) -> Table<'static>; } diff --git a/query-engine/connectors/sql-query-connector/src/nested_aggregations.rs b/query-engine/query-builders/sql-query-builder/src/nested_aggregations.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/nested_aggregations.rs rename to query-engine/query-builders/sql-query-builder/src/nested_aggregations.rs diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/query-builders/sql-query-builder/src/ordering.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/ordering.rs rename to query-engine/query-builders/sql-query-builder/src/ordering.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs b/query-engine/query-builders/sql-query-builder/src/query_arguments_ext.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs rename to query-engine/query-builders/sql-query-builder/src/query_arguments_ext.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/query-builders/sql-query-builder/src/read.rs similarity index 97% rename from query-engine/connectors/sql-query-connector/src/query_builder/read.rs rename to 
query-engine/query-builders/sql-query-builder/src/read.rs index e33d51857a2f..f359b94c25d7 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/query-builders/sql-query-builder/src/read.rs @@ -1,12 +1,17 @@ -use crate::{ - cursor_condition, filter::FilterBuilder, model_extensions::*, nested_aggregations, ordering::OrderByBuilder, - sql_trace::SqlTraceComment, Context, -}; -use connector_interface::AggregationSelection; use itertools::Itertools; use quaint::ast::*; use query_structure::*; +use crate::{ + context::Context, + cursor_condition, + filter::FilterBuilder, + model_extensions::{AsColumn, AsColumns, AsTable}, + nested_aggregations, + ordering::OrderByBuilder, + sql_trace::SqlTraceComment, +}; + pub trait SelectDefinition { fn into_select<'a>( self, @@ -168,7 +173,7 @@ where /// ``` /// Important note: Do not use the AsColumn trait here as we need to construct column references that are relative, /// not absolute - e.g. `SELECT "field" FROM (...)` NOT `SELECT "full"."path"."to"."field" FROM (...)`. 
-pub(crate) fn aggregate( +pub fn aggregate( model: &Model, selections: &[AggregationSelection], args: QueryArguments, @@ -222,7 +227,7 @@ pub(crate) fn aggregate( ) } -pub(crate) fn group_by_aggregate( +pub fn group_by_aggregate( model: &Model, args: QueryArguments, selections: &[AggregationSelection], diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs b/query-engine/query-builders/sql-query-builder/src/select/lateral.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs rename to query-engine/query-builders/sql-query-builder/src/select/lateral.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs b/query-engine/query-builders/sql-query-builder/src/select/mod.rs similarity index 99% rename from query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs rename to query-engine/query-builders/sql-query-builder/src/select/mod.rs index f0b4fd7abe2a..a2682d670557 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs +++ b/query-engine/query-builders/sql-query-builder/src/select/mod.rs @@ -23,7 +23,7 @@ use self::{lateral::LateralJoinSelectBuilder, subquery::SubqueriesSelectBuilder} pub(crate) const JSON_AGG_IDENT: &str = "__prisma_data__"; -pub(crate) struct SelectBuilder; +pub struct SelectBuilder; impl SelectBuilder { pub fn build(args: QueryArguments, selected_fields: &FieldSelection, ctx: &Context<'_>) -> Select<'static> { diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs b/query-engine/query-builders/sql-query-builder/src/select/subquery.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs rename to query-engine/query-builders/sql-query-builder/src/select/subquery.rs diff --git a/query-engine/connectors/sql-query-connector/src/sql_trace.rs 
b/query-engine/query-builders/sql-query-builder/src/sql_trace.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/sql_trace.rs rename to query-engine/query-builders/sql-query-builder/src/sql_trace.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/query-builders/sql-query-builder/src/write.rs similarity index 94% rename from query-engine/connectors/sql-query-connector/src/query_builder/write.rs rename to query-engine/query-builders/sql-query-builder/src/write.rs index 5cfbd8002fe0..1059cb6069f8 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/query-builders/sql-query-builder/src/write.rs @@ -1,6 +1,5 @@ use crate::limit::wrap_with_limit_subquery_if_needed; use crate::{model_extensions::*, sql_trace::SqlTraceComment, Context}; -use connector_interface::{DatasourceFieldName, ScalarWriteOperation, WriteArgs}; use quaint::ast::*; use query_structure::*; use std::{collections::HashSet, convert::TryInto}; @@ -40,7 +39,7 @@ pub fn create_record( /// where each `WriteArg` in the Vec is one row. /// Requires `affected_fields` to be non-empty to produce valid SQL. #[allow(clippy::mutable_key_type)] -pub(crate) fn create_records_nonempty( +pub fn create_records_nonempty( model: &Model, args: Vec, skip_duplicates: bool, @@ -97,7 +96,7 @@ pub(crate) fn create_records_nonempty( } /// `INSERT` empty records statement. 
-pub(crate) fn create_records_empty( +pub fn create_records_empty( model: &Model, skip_duplicates: bool, selected_fields: Option<&ModelProjection>, @@ -117,7 +116,7 @@ pub(crate) fn create_records_empty( insert } -pub(crate) fn build_update_and_set_query( +pub fn build_update_and_set_query( model: &Model, args: WriteArgs, selected_fields: Option<&ModelProjection>, @@ -185,22 +184,20 @@ pub(crate) fn build_update_and_set_query( query } -pub(crate) fn chunk_update_with_ids( +pub fn chunk_update_with_ids( update: Update<'static>, model: &Model, ids: &[&SelectionResult], filter_condition: ConditionTree<'static>, ctx: &Context<'_>, -) -> crate::Result>> { +) -> Vec> { let columns: Vec<_> = ModelProjection::from(model.primary_identifier()) .as_columns(ctx) .collect(); - let query = super::chunked_conditions(&columns, ids, ctx, |conditions| { + super::chunked_conditions(&columns, ids, ctx, |conditions| { update.clone().so_that(conditions.and(filter_condition.clone())) - }); - - Ok(query) + }) } /// Converts a list of selected fields into an iterator of table columns. 
@@ -211,7 +208,7 @@ fn projection_into_columns( selected_fields.as_columns(ctx).map(|c| c.set_is_selected(true)) } -pub(crate) fn delete_returning( +pub fn delete_returning( model: &Model, filter: ConditionTree<'static>, selected_fields: &ModelProjection, @@ -224,7 +221,7 @@ pub(crate) fn delete_returning( .into() } -pub(crate) fn delete_many_from_filter( +pub fn delete_many_from_filter( model: &Model, filter_condition: ConditionTree<'static>, limit: Option, @@ -238,7 +235,7 @@ pub(crate) fn delete_many_from_filter( .into() } -pub(crate) fn delete_many_from_ids_and_filter( +pub fn delete_many_from_ids_and_filter( model: &Model, ids: &[&SelectionResult], filter_condition: ConditionTree<'static>, @@ -254,7 +251,7 @@ pub(crate) fn delete_many_from_ids_and_filter( }) } -pub(crate) fn create_relation_table_records( +pub fn create_relation_table_records( field: &RelationFieldRef, parent_id: &SelectionResult, child_ids: &[SelectionResult], @@ -279,7 +276,7 @@ pub(crate) fn create_relation_table_records( insert.build().on_conflict(OnConflict::DoNothing).into() } -pub(crate) fn delete_relation_table_records( +pub fn delete_relation_table_records( parent_field: &RelationFieldRef, parent_id: &SelectionResult, child_ids: &[SelectionResult], diff --git a/query-engine/query-structure/Cargo.toml b/query-engine/query-structure/Cargo.toml index 183dd2847301..78c0e4469fc6 100644 --- a/query-engine/query-structure/Cargo.toml +++ b/query-engine/query-structure/Cargo.toml @@ -16,6 +16,7 @@ cuid = { workspace = true, optional = true } ulid = { workspace = true, optional = true } nanoid = { version = "0.4.0", optional = true } chrono.workspace = true +indexmap.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] getrandom = { workspace = true, features = ["js"] } diff --git a/query-engine/query-structure/src/aggregate_selection.rs b/query-engine/query-structure/src/aggregate_selection.rs new file mode 100644 index 000000000000..f83bf6e0ff95 --- /dev/null +++ 
b/query-engine/query-structure/src/aggregate_selection.rs @@ -0,0 +1,70 @@ +use psl::schema_ast::ast::FieldArity; + +use crate::{ScalarFieldRef, TypeIdentifier}; + +/// Selections for aggregation queries. +#[derive(Debug, Clone)] +pub enum AggregationSelection { + /// Single field selector. Only valid in the context of group by statements. + Field(ScalarFieldRef), + + /// Counts records of the model that match the query. + /// `all` indicates that an all-records selection has been made (e.g. SQL *). + /// `fields` are specific fields to count on. By convention, if `all` is true, + /// it will always be the last of the count results. + Count { all: bool, fields: Vec }, + + /// Compute average for each field contained. + Average(Vec), + + /// Compute sum for each field contained. + Sum(Vec), + + /// Compute minimum for each field contained. + Min(Vec), + + /// Compute maximum for each field contained. + Max(Vec), +} + +impl AggregationSelection { + /// Returns (field_db_name, TypeIdentifier, FieldArity) + pub fn identifiers(&self) -> Vec<(String, TypeIdentifier, FieldArity)> { + match self { + AggregationSelection::Field(field) => { + vec![(field.db_name().to_owned(), field.type_identifier(), field.arity())] + } + + AggregationSelection::Count { all, fields } => { + let mut mapped = Self::map_field_types(fields, Some(TypeIdentifier::Int)); + + if *all { + mapped.push(("all".to_owned(), TypeIdentifier::Int, FieldArity::Required)); + } + + mapped + } + + AggregationSelection::Average(fields) => Self::map_field_types(fields, Some(TypeIdentifier::Float)), + AggregationSelection::Sum(fields) => Self::map_field_types(fields, None), + AggregationSelection::Min(fields) => Self::map_field_types(fields, None), + AggregationSelection::Max(fields) => Self::map_field_types(fields, None), + } + } + + fn map_field_types( + fields: &[ScalarFieldRef], + fixed_type: Option, + ) -> Vec<(String, TypeIdentifier, FieldArity)> { + fields + .iter() + .map(|f| { + ( + f.db_name().to_owned(),
+ fixed_type.unwrap_or_else(|| f.type_identifier()), + FieldArity::Required, + ) + }) + .collect() + } +} diff --git a/query-engine/query-structure/src/filter/mod.rs b/query-engine/query-structure/src/filter/mod.rs index 82727f35c80e..64e837e62f3c 100644 --- a/query-engine/query-structure/src/filter/mod.rs +++ b/query-engine/query-structure/src/filter/mod.rs @@ -10,6 +10,7 @@ mod composite; mod into_filter; mod json; mod list; +mod record; mod relation; mod scalar; @@ -18,6 +19,7 @@ pub use composite::*; pub use into_filter::*; pub use json::*; pub use list::*; +pub use record::*; pub use relation::*; pub use scalar::*; diff --git a/query-engine/query-structure/src/filter/record.rs b/query-engine/query-structure/src/filter/record.rs new file mode 100644 index 000000000000..41ea87c47325 --- /dev/null +++ b/query-engine/query-structure/src/filter/record.rs @@ -0,0 +1,55 @@ +use crate::SelectionResult; + +use super::Filter; + +/// A wrapper struct allowing to either filter for records or for the core to +/// communicate already known record selectors to connectors. +/// +/// Connector implementations should use known selectors to skip unnecessary fetch operations +/// if the query core already determined the selectors in a previous step. Simply put, +/// `selectors` should always have precedence over `filter`.
+#[derive(Debug, Clone)] +pub struct RecordFilter { + pub filter: Filter, + pub selectors: Option>, +} + +impl RecordFilter { + pub fn empty() -> Self { + Self { + filter: Filter::empty(), + selectors: None, + } + } + + pub fn has_selectors(&self) -> bool { + self.selectors.is_some() + } +} + +impl From for RecordFilter { + fn from(filter: Filter) -> Self { + Self { + filter, + selectors: None, + } + } +} + +impl From> for RecordFilter { + fn from(selectors: Vec) -> Self { + Self { + filter: Filter::empty(), + selectors: Some(selectors), + } + } +} + +impl From for RecordFilter { + fn from(selector: SelectionResult) -> Self { + Self { + filter: Filter::empty(), + selectors: Some(vec![selector]), + } + } +} diff --git a/query-engine/query-structure/src/lib.rs b/query-engine/query-structure/src/lib.rs index abf47fe8447c..66c4af906dc2 100644 --- a/query-engine/query-structure/src/lib.rs +++ b/query-engine/query-structure/src/lib.rs @@ -1,3 +1,4 @@ +mod aggregate_selection; mod composite_type; mod convert; mod default_value; @@ -18,12 +19,14 @@ mod query_arguments; mod record; mod relation; mod selection_result; +mod write_args; mod zipper; pub mod filter; pub mod prelude; pub use self::{default_value::*, native_type_instance::*, zipper::*}; +pub use aggregate_selection::*; pub use composite_type::*; pub use convert::convert; pub use distinct::*; @@ -41,6 +44,7 @@ pub use query_arguments::*; pub use record::*; pub use relation::*; pub use selection_result::*; +pub use write_args::*; // Re-exports pub use prisma_value::*; diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/query-structure/src/write_args.rs similarity index 98% rename from query-engine/connectors/query-connector/src/write_args.rs rename to query-engine/query-structure/src/write_args.rs index b02fa873f83c..6429bed497c1 100644 --- a/query-engine/connectors/query-connector/src/write_args.rs +++ b/query-engine/query-structure/src/write_args.rs @@ -1,9 +1,8 @@ -use 
crate::error::{ConnectorError, ErrorKind}; -use indexmap::{map::Keys, IndexMap}; -use query_structure::{ +use crate::{ CompositeFieldRef, Field, Filter, Model, ModelProjection, PrismaValue, ScalarFieldRef, SelectedField, SelectionResult, }; +use indexmap::{map::Keys, IndexMap}; use std::{borrow::Borrow, convert::TryInto, ops::Deref}; /// WriteArgs represent data to be written to an underlying data source. @@ -334,19 +333,20 @@ impl From<(&SelectedField, PrismaValue)> for WriteOperation { } impl TryInto for WriteOperation { - type Error = ConnectorError; + type Error = UnexpectedWriteOperation; fn try_into(self) -> Result { match self { WriteOperation::Scalar(ScalarWriteOperation::Set(pv)) => Ok(pv), WriteOperation::Composite(CompositeWriteOperation::Set(pv)) => Ok(pv), - x => Err(ConnectorError::from_kind(ErrorKind::InternalConversionError(format!( - "Unable to convert write expression {x:?} into prisma value." - )))), + x => Err(UnexpectedWriteOperation(x)), } } } +#[derive(Debug)] +pub struct UnexpectedWriteOperation(pub WriteOperation); + impl WriteArgs { pub fn new(args: IndexMap, request_now: PrismaValue) -> Self { Self { args, request_now }