From 740a356eb7392bdf5465ffb3cd0ec8944a50df21 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 24 Oct 2024 23:13:58 +0200 Subject: [PATCH 01/65] Prepare playground --- Cargo.lock | 1 + libs/prisma-value/src/lib.rs | 1 + query-engine/core/src/compiler/mod.rs | 13 +++++ query-engine/core/src/lib.rs | 1 + .../core/src/query_graph_builder/error.rs | 8 ++++ query-engine/query-engine/Cargo.toml | 1 + .../query-engine/examples/compiler.rs | 47 +++++++++++++++++++ 7 files changed, 72 insertions(+) create mode 100644 query-engine/core/src/compiler/mod.rs create mode 100644 query-engine/query-engine/examples/compiler.rs diff --git a/Cargo.lock b/Cargo.lock index c3cd6e22f2ca..4af52ac9dc2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3799,6 +3799,7 @@ dependencies = [ "enumflags2", "graphql-parser", "hyper", + "indexmap 2.2.2", "indoc 2.0.3", "mongodb-query-connector", "prisma-metrics", diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index 8a1b10c2aedb..4953a08a2a7f 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -47,6 +47,7 @@ pub enum PrismaValue { #[serde(serialize_with = "serialize_bytes")] Bytes(Vec), + // Placeholder(TypeIdentifier), } /// Stringify a date to the following format diff --git a/query-engine/core/src/compiler/mod.rs b/query-engine/core/src/compiler/mod.rs new file mode 100644 index 000000000000..8bff8e9188e3 --- /dev/null +++ b/query-engine/core/src/compiler/mod.rs @@ -0,0 +1,13 @@ +use query_structure::SelectionResult; + +use crate::QueryGraph; + +#[derive(Debug)] +pub enum Expression { + Sequence(Vec), + Query { sql: String, params: SelectionResult }, +} + +pub fn translate(mut graph: QueryGraph) -> Expression { + unimplemented!() +} diff --git a/query-engine/core/src/lib.rs b/query-engine/core/src/lib.rs index 7e1868cc017f..3280660dd458 100644 --- a/query-engine/core/src/lib.rs +++ b/query-engine/core/src/lib.rs @@ -3,6 +3,7 @@ #[macro_use] extern crate tracing; +pub mod compiler; pub mod 
constants; pub mod executor; pub mod protocol; diff --git a/query-engine/core/src/query_graph_builder/error.rs b/query-engine/core/src/query_graph_builder/error.rs index 825b312bbbf5..937b3842d34f 100644 --- a/query-engine/core/src/query_graph_builder/error.rs +++ b/query-engine/core/src/query_graph_builder/error.rs @@ -43,6 +43,14 @@ pub enum QueryGraphBuilderError { QueryGraphError(QueryGraphError), } +impl std::fmt::Display for QueryGraphBuilderError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(self, f) + } +} + +impl std::error::Error for QueryGraphBuilderError {} + #[derive(Debug)] pub struct RelationViolation { pub(crate) relation_name: String, diff --git a/query-engine/query-engine/Cargo.toml b/query-engine/query-engine/Cargo.toml index 439a64f987c9..db011f9238d7 100644 --- a/query-engine/query-engine/Cargo.toml +++ b/query-engine/query-engine/Cargo.toml @@ -38,6 +38,7 @@ telemetry = { path = "../../libs/telemetry" } serial_test = "*" quaint.workspace = true indoc.workspace = true +indexmap.workspace = true [build-dependencies] build-utils.path = "../../libs/build-utils" diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs new file mode 100644 index 000000000000..8b45bb111413 --- /dev/null +++ b/query-engine/query-engine/examples/compiler.rs @@ -0,0 +1,47 @@ +use std::sync::Arc; + +use indexmap::IndexMap; +use query_core::{query_graph_builder::QueryGraphBuilder, schema::QueryTag, QueryDocument}; +use request_handlers::{Action, FieldQuery, JsonBody, JsonSingleQuery, RequestBody, SelectionSet, SelectionSetValue}; + +pub fn main() -> anyhow::Result<()> { + let schema_path = std::env::var("PRISMA_DML_PATH")?; + let schema_string = std::fs::read_to_string(schema_path)?; + let schema = psl::validate(schema_string.into()); + + if schema.diagnostics.has_errors() { + anyhow::bail!("invalid schema"); + } + + let schema = Arc::new(schema); + let query_schema = 
Arc::new(query_core::schema::build(schema, true)); + + let request = RequestBody::Json(JsonBody::Single(JsonSingleQuery { + model_name: Some("User".into()), + action: Action::new(QueryTag::FindMany), + query: FieldQuery { + arguments: None, + selection: SelectionSet::new({ + let mut map = IndexMap::new(); + map.insert("$scalars".into(), SelectionSetValue::Shorthand(true)); + map + }), + }, + })); + + let doc = request.into_doc(&query_schema)?; + + let QueryDocument::Single(query) = doc else { + anyhow::bail!("expected single query"); + }; + + let (graph, _serializer) = QueryGraphBuilder::new(&query_schema).build(query)?; + + println!("{}", graph.to_string()); + + let expr = query_core::compiler::translate(graph); + + println!("{expr:?}"); + + Ok(()) +} From b89fd0d642440d4e3fbad3747902d753dea14c67 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 25 Oct 2024 10:12:20 +0200 Subject: [PATCH 02/65] Implement more structure --- query-engine/core/src/compiler/expression.rs | 74 +++++++++ query-engine/core/src/compiler/mod.rs | 15 +- query-engine/core/src/compiler/translate.rs | 142 ++++++++++++++++++ .../query-engine/examples/compiler.rs | 4 +- 4 files changed, 221 insertions(+), 14 deletions(-) create mode 100644 query-engine/core/src/compiler/expression.rs create mode 100644 query-engine/core/src/compiler/translate.rs diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs new file mode 100644 index 000000000000..ce2833a81052 --- /dev/null +++ b/query-engine/core/src/compiler/expression.rs @@ -0,0 +1,74 @@ +use query_structure::PrismaValue; + +#[derive(Debug)] +pub struct Binding { + pub name: String, + pub expr: Expression, +} + +impl std::fmt::Display for Binding { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{} = {}", self.name, self.expr) + } +} + +#[derive(Debug)] +pub enum Expression { + Seq(Vec), + Get { + name: String, + }, + Let { + bindings: Vec, + expr: Box, + }, 
+ GetFirstNonEmpty { + names: Vec, + }, + Query { + sql: String, + params: Vec, + }, +} + +impl Expression { + fn display(&self, f: &mut std::fmt::Formatter<'_>, level: usize) -> std::fmt::Result { + let indent = " ".repeat(level); + + match self { + Self::Seq(exprs) => { + for expr in exprs { + expr.display(f, level)?; + write!(f, ";\n")?; + } + } + Self::Get { name } => { + write!(f, "{indent}get {name}")?; + } + Self::Let { bindings, expr } => { + write!(f, "{indent}let\n")?; + for binding in bindings { + write!(f, "{indent} {binding},\n")?; + } + write!(f, "{indent}in\n")?; + expr.display(f, level + 1)?; + } + Self::GetFirstNonEmpty { names } => { + write!(f, "{indent}getFirstNonEmpty")?; + for name in names { + write!(f, " {}", name)?; + } + } + Self::Query { sql, params } => { + write!(f, "{indent}query {{{sql}}}\" with {params:?}")?; + } + } + Ok(()) + } +} + +impl std::fmt::Display for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.display(f, 0) + } +} diff --git a/query-engine/core/src/compiler/mod.rs b/query-engine/core/src/compiler/mod.rs index 8bff8e9188e3..f3f5aba6c017 100644 --- a/query-engine/core/src/compiler/mod.rs +++ b/query-engine/core/src/compiler/mod.rs @@ -1,13 +1,4 @@ -use query_structure::SelectionResult; +pub mod expression; +pub mod translate; -use crate::QueryGraph; - -#[derive(Debug)] -pub enum Expression { - Sequence(Vec), - Query { sql: String, params: SelectionResult }, -} - -pub fn translate(mut graph: QueryGraph) -> Expression { - unimplemented!() -} +pub use translate::translate; diff --git a/query-engine/core/src/compiler/translate.rs b/query-engine/core/src/compiler/translate.rs new file mode 100644 index 000000000000..0195f0125bc1 --- /dev/null +++ b/query-engine/core/src/compiler/translate.rs @@ -0,0 +1,142 @@ +use thiserror::Error; + +use crate::{EdgeRef, Node, NodeRef, Query, QueryGraph}; + +use super::expression::{Binding, Expression}; + +#[derive(Debug, Error)] +pub enum 
TranslateError { + #[error("node {0} has no content")] + NodeContentEmpty(String), +} + +pub type TranslateResult = Result; + +pub fn translate(mut graph: QueryGraph) -> TranslateResult { + graph + .root_nodes() + .into_iter() + .map(|node| NodeTranslator::new(&mut graph, node, &[]).translate()) + .collect::>>() + .map(Expression::Seq) +} + +struct NodeTranslator<'a, 'b> { + graph: &'a mut QueryGraph, + node: NodeRef, + parent_edges: &'b [EdgeRef], +} + +impl<'a, 'b> NodeTranslator<'a, 'b> { + fn new(graph: &'a mut QueryGraph, node: NodeRef, parent_edges: &'b [EdgeRef]) -> Self { + Self { + graph, + node, + parent_edges, + } + } + + fn translate(&mut self) -> TranslateResult { + let node = self + .graph + .node_content(&self.node) + .ok_or_else(|| TranslateError::NodeContentEmpty(self.node.id()))?; + + match node { + Node::Query(_) => self.translate_query(), + _ => unimplemented!(), + } + } + + fn translate_query(&mut self) -> TranslateResult { + self.graph.mark_visited(&self.node); + + let query: Query = self + .graph + .pluck_node(&self.node) + .try_into() + .expect("current node must be query"); + + unimplemented!() + } + + fn process_children(&mut self) -> TranslateResult> { + let mut child_pairs = self.graph.direct_child_pairs(&self.node); + + // Find the positions of all result returning graph nodes. + let mut result_positions = child_pairs + .iter() + .enumerate() + .filter_map(|(idx, (_, child_node))| { + if self.graph.subgraph_contains_result(child_node) { + Some(idx) + } else { + None + } + }) + .collect::>(); + + // Start removing the highest indices first to not invalidate subsequent removals. + result_positions.sort_unstable(); + result_positions.reverse(); + + let result_subgraphs = result_positions + .into_iter() + .map(|pos| child_pairs.remove(pos)) + .collect::>(); + + // Because we split from right to left, everything remaining in `child_pairs` + // doesn't belong into results, and is executed before all result scopes. 
+ let mut expressions: Vec = child_pairs + .into_iter() + .map(|(_, node)| { + let edges = self.graph.incoming_edges(&node); + NodeTranslator::new(self.graph, node, &edges).translate() + }) + .collect::, _>>()?; + + // Fold result scopes into one expression. + if !result_subgraphs.is_empty() { + let result_exp = self.fold_result_scopes(result_subgraphs)?; + expressions.push(result_exp); + } + + Ok(expressions) + } + + fn fold_result_scopes(&mut self, result_subgraphs: Vec<(EdgeRef, NodeRef)>) -> TranslateResult { + // if the subgraphs all point to the same result node, we fold them in sequence + // if not, we can separate them with a getfirstnonempty + let bindings = result_subgraphs + .into_iter() + .map(|(_, node)| { + let name = node.id(); + let edges = self.graph.incoming_edges(&node); + let expr = NodeTranslator::new(self.graph, node, &edges).translate()?; + Ok(Binding { name, expr }) + }) + .collect::>>()?; + + let result_nodes = self.graph.result_nodes(); + let result_binding_names = bindings.iter().map(|b| b.name.clone()).collect::>(); + + if result_nodes.len() == 1 { + Ok(Expression::Let { + bindings, + expr: Box::new(Expression::Get { + name: result_binding_names + .into_iter() + .last() + .expect("no binding for result node"), + }), + }) + } else { + Ok(Expression::Let { + bindings, + expr: Box::new(Expression::GetFirstNonEmpty { + names: result_binding_names, + }), + }) + } + } +} diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 8b45bb111413..6b095633c3fd 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -39,9 +39,9 @@ pub fn main() -> anyhow::Result<()> { println!("{}", graph.to_string()); - let expr = query_core::compiler::translate(graph); + let expr = query_core::compiler::translate(graph)?; - println!("{expr:?}"); + println!("{expr}"); Ok(()) } From 82537d97768177cf395578bc044afd1ada577ec5 Mon Sep 17 00:00:00 2001 
From: Alexey Orlenko Date: Fri, 25 Oct 2024 11:28:19 +0200 Subject: [PATCH 03/65] Inline the schema in the compiler example --- .../query-engine/examples/compiler.rs | 3 +-- .../query-engine/examples/schema.prisma | 27 +++++++++++++++++++ 2 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 query-engine/query-engine/examples/schema.prisma diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 6b095633c3fd..45442210ed54 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -5,8 +5,7 @@ use query_core::{query_graph_builder::QueryGraphBuilder, schema::QueryTag, Query use request_handlers::{Action, FieldQuery, JsonBody, JsonSingleQuery, RequestBody, SelectionSet, SelectionSetValue}; pub fn main() -> anyhow::Result<()> { - let schema_path = std::env::var("PRISMA_DML_PATH")?; - let schema_string = std::fs::read_to_string(schema_path)?; + let schema_string = include_str!("./schema.prisma"); let schema = psl::validate(schema_string.into()); if schema.diagnostics.has_errors() { diff --git a/query-engine/query-engine/examples/schema.prisma b/query-engine/query-engine/examples/schema.prisma new file mode 100644 index 000000000000..ab9cd218da49 --- /dev/null +++ b/query-engine/query-engine/examples/schema.prisma @@ -0,0 +1,27 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "postgresql" + url = "postgresql://postgres:prisma@localhost:5438" +} + +model User { + id String @id @default(cuid()) + email String @unique + name String? + posts Post[] + val Int? +} + +model Post { + id String @id @default(cuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + published Boolean + title String + content String? + authorId String? + author User? 
@relation(fields: [authorId], references: [id]) +} From f2f088f2b6c67cbbd46c4cf4d3074585221af948 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 25 Oct 2024 11:32:54 +0200 Subject: [PATCH 04/65] Remove useless explicit to_string --- query-engine/query-engine/examples/compiler.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 45442210ed54..2de007a8c77e 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -36,7 +36,7 @@ pub fn main() -> anyhow::Result<()> { let (graph, _serializer) = QueryGraphBuilder::new(&query_schema).build(query)?; - println!("{}", graph.to_string()); + println!("{graph}"); let expr = query_core::compiler::translate(graph)?; From a364a7891d36a831cd468d3884289b2d44861065 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 25 Oct 2024 16:56:35 +0200 Subject: [PATCH 05/65] Fix expression formatting --- query-engine/core/src/compiler/expression.rs | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index ce2833a81052..2f874664b403 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -37,20 +37,24 @@ impl Expression { match self { Self::Seq(exprs) => { + writeln!(f, "{indent}{{")?; for expr in exprs { - expr.display(f, level)?; - write!(f, ";\n")?; + expr.display(f, level + 1)?; + writeln!(f, ";")?; } + write!(f, "{indent}}}")?; } Self::Get { name } => { write!(f, "{indent}get {name}")?; } Self::Let { bindings, expr } => { - write!(f, "{indent}let\n")?; - for binding in bindings { - write!(f, "{indent} {binding},\n")?; + writeln!(f, "{indent}let")?; + for Binding { name, expr } in bindings { + writeln!(f, "{indent} {name} =")?; + expr.display(f, level + 2)?; + writeln!(f, 
";")?; } - write!(f, "{indent}in\n")?; + writeln!(f, "{indent}in")?; expr.display(f, level + 1)?; } Self::GetFirstNonEmpty { names } => { @@ -60,7 +64,7 @@ impl Expression { } } Self::Query { sql, params } => { - write!(f, "{indent}query {{{sql}}}\" with {params:?}")?; + write!(f, "{indent}query {{\n{indent} {sql}\n{indent}}} with {params:?}")?; } } Ok(()) From 6be72d06a619e8f4245d906433226821a6c2b7d7 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 25 Oct 2024 17:23:57 +0200 Subject: [PATCH 06/65] Split ReadQuery and WriteQuery --- query-engine/core/src/compiler/expression.rs | 29 +++++++++++++++----- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 2f874664b403..9729f2949bdc 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -12,6 +12,12 @@ impl std::fmt::Display for Binding { } } +#[derive(Debug)] +pub struct DbQuery { + pub query: String, + pub params: Vec, +} + #[derive(Debug)] pub enum Expression { Seq(Vec), @@ -25,10 +31,8 @@ pub enum Expression { GetFirstNonEmpty { names: Vec, }, - Query { - sql: String, - params: Vec, - }, + ReadQuery(DbQuery), + WriteQuery(DbQuery), } impl Expression { @@ -63,12 +67,23 @@ impl Expression { write!(f, " {}", name)?; } } - Self::Query { sql, params } => { - write!(f, "{indent}query {{\n{indent} {sql}\n{indent}}} with {params:?}")?; - } + Self::ReadQuery(query) => self.display_query("readQuery", query, f, level)?, + Self::WriteQuery(query) => self.display_query("writeQuery", query, f, level)?, } Ok(()) } + + fn display_query( + &self, + op: &str, + db_query: &DbQuery, + f: &mut std::fmt::Formatter<'_>, + level: usize, + ) -> std::fmt::Result { + let indent = " ".repeat(level); + let DbQuery { query, params } = db_query; + write!(f, "{indent}{op} {{\n{indent} {query}\n{indent}}} with {params:?}") + } } impl std::fmt::Display for Expression { From 
d9afbeb5e47a137494da26c845f0fa049491b8f0 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 25 Oct 2024 18:58:51 +0200 Subject: [PATCH 07/65] Implement translating simple read query --- Cargo.lock | 2 + libs/prisma-value/src/lib.rs | 5 +- .../sql-query-connector/src/context.rs | 4 +- .../connectors/sql-query-connector/src/lib.rs | 6 +- .../src/model_extensions/column.rs | 4 +- .../src/model_extensions/mod.rs | 2 +- .../src/query_builder/mod.rs | 6 +- .../src/query_builder/read.rs | 4 +- query-engine/core/Cargo.toml | 8 + query-engine/core/src/compiler/expression.rs | 6 + query-engine/core/src/compiler/translate.rs | 8 +- .../core/src/compiler/translate/query.rs | 138 ++++++++++++++++++ 12 files changed, 178 insertions(+), 15 deletions(-) create mode 100644 query-engine/core/src/compiler/translate/query.rs diff --git a/Cargo.lock b/Cargo.lock index 4af52ac9dc2b..5f104e97bb5c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3771,11 +3771,13 @@ dependencies = [ "petgraph", "prisma-metrics", "psl", + "quaint", "query-connector", "query-structure", "schema", "serde", "serde_json", + "sql-query-connector", "telemetry", "thiserror", "tokio", diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index 4953a08a2a7f..f6dcede81da7 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -47,7 +47,10 @@ pub enum PrismaValue { #[serde(serialize_with = "serialize_bytes")] Bytes(Vec), - // Placeholder(TypeIdentifier), + // Placeholder { + // name: String, + // r#type: Type, + // } } /// Stringify a date to the following format diff --git a/query-engine/connectors/sql-query-connector/src/context.rs b/query-engine/connectors/sql-query-connector/src/context.rs index 3f90e94a027d..b3e28c8152c1 100644 --- a/query-engine/connectors/sql-query-connector/src/context.rs +++ b/query-engine/connectors/sql-query-connector/src/context.rs @@ -1,7 +1,7 @@ use quaint::prelude::ConnectionInfo; use telemetry::TraceParent; -pub(super) struct Context<'a> { 
+pub struct Context<'a> { connection_info: &'a ConnectionInfo, pub(crate) traceparent: Option, /// Maximum rows allowed at once for an insert query. @@ -13,7 +13,7 @@ pub(super) struct Context<'a> { } impl<'a> Context<'a> { - pub(crate) fn new(connection_info: &'a ConnectionInfo, traceparent: Option) -> Self { + pub fn new(connection_info: &'a ConnectionInfo, traceparent: Option) -> Self { let max_insert_rows = connection_info.max_insert_rows(); let max_bind_values = connection_info.max_bind_values(); diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index 2f019b41e54c..a5403eefd998 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -2,18 +2,18 @@ #![deny(unsafe_code)] mod column_metadata; -mod context; +pub mod context; mod cursor_condition; mod database; mod error; mod filter; mod join_utils; mod limit; -mod model_extensions; +pub mod model_extensions; mod nested_aggregations; mod ordering; mod query_arguments_ext; -mod query_builder; +pub mod query_builder; mod query_ext; mod row; mod ser_raw; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs index 1ee4c358b0d2..42557aa01b3e 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs @@ -32,7 +32,7 @@ impl From>> for ColumnIterator { } } -pub(crate) trait AsColumns { +pub trait AsColumns { fn as_columns(&self, ctx: &Context<'_>) -> ColumnIterator; } @@ -48,7 +48,7 @@ impl AsColumns for ModelProjection { } } -pub(crate) trait AsColumn { +pub trait AsColumn { fn as_column(&self, ctx: &Context<'_>) -> Column<'static>; } diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs 
b/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs index 66cb072cc7af..2c6096ffb481 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs @@ -5,4 +5,4 @@ mod scalar_field; mod selection_result; mod table; -pub(crate) use self::{column::*, record::*, relation::*, scalar_field::*, selection_result::*, table::*}; +pub use self::{column::*, record::*, relation::*, scalar_field::*, selection_result::*, table::*}; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs index 199847a2f340..15d696b4e7ea 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs @@ -1,7 +1,7 @@ -pub(crate) mod read; +pub mod read; #[cfg(feature = "relation_joins")] -pub(crate) mod select; -pub(crate) mod write; +pub mod select; +pub mod write; use crate::context::Context; use crate::model_extensions::SelectionResultExt; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs index 7b1806948688..e33d51857a2f 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs @@ -7,7 +7,7 @@ use itertools::Itertools; use quaint::ast::*; use query_structure::*; -pub(crate) trait SelectDefinition { +pub trait SelectDefinition { fn into_select<'a>( self, _: &Model, @@ -122,7 +122,7 @@ impl SelectDefinition for QueryArguments { } } -pub(crate) fn get_records<'a, T>( +pub fn get_records<'a, T>( model: &Model, columns: impl Iterator>, virtual_selections: impl IntoIterator, diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index 6edb37979c24..d8f97ec2cec1 100644 --- 
a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -41,3 +41,11 @@ telemetry = { path = "../../libs/telemetry" } lru = "0.7.7" enumflags2.workspace = true derive_more.workspace = true + +# HACK: query builders need to be a separate crate, and maybe the compiler too +# HACK: we hardcode PostgreSQL as the dialect for now +sql-query-connector = { path = "../connectors/sql-query-connector", features = [ + "postgresql", +] } +# HACK: this should not be in core either +quaint.workspace = true diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 9729f2949bdc..d9329128db54 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -18,6 +18,12 @@ pub struct DbQuery { pub params: Vec, } +impl DbQuery { + pub fn new(query: String, params: Vec) -> Self { + Self { query, params } + } +} + #[derive(Debug)] pub enum Expression { Seq(Vec), diff --git a/query-engine/core/src/compiler/translate.rs b/query-engine/core/src/compiler/translate.rs index 0195f0125bc1..f0346120ae4d 100644 --- a/query-engine/core/src/compiler/translate.rs +++ b/query-engine/core/src/compiler/translate.rs @@ -1,3 +1,6 @@ +mod query; + +use query::translate_query; use thiserror::Error; use crate::{EdgeRef, Node, NodeRef, Query, QueryGraph}; @@ -8,6 +11,9 @@ use super::expression::{Binding, Expression}; pub enum TranslateError { #[error("node {0} has no content")] NodeContentEmpty(String), + + #[error("{0}")] + QuaintError(#[from] quaint::error::Error), } pub type TranslateResult = Result; @@ -57,7 +63,7 @@ impl<'a, 'b> NodeTranslator<'a, 'b> { .try_into() .expect("current node must be query"); - unimplemented!() + translate_query(query) } fn process_children(&mut self) -> TranslateResult> { diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs new file mode 100644 index 000000000000..fb69274461ce --- /dev/null +++ 
b/query-engine/core/src/compiler/translate/query.rs @@ -0,0 +1,138 @@ +use bigdecimal::{BigDecimal, FromPrimitive}; +use chrono::{DateTime, NaiveDate, Utc}; +use quaint::{ + prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, + visitor::Visitor, +}; +use query_structure::{ModelProjection, PrismaValue}; +use sql_query_connector::{context::Context, model_extensions::AsColumns, query_builder}; + +use crate::{ + compiler::expression::{DbQuery, Expression}, + Query, ReadQuery, +}; + +use super::TranslateResult; + +pub(crate) fn translate_query(query: Query) -> TranslateResult { + let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( + SqlFamily::Postgres, + "public".to_owned(), + None, + )); + + let ctx = Context::new(&connection_info, None); + + match query { + Query::Read(rq) => translate_read_query(rq, &ctx), + _ => unimplemented!(), + } +} + +fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> TranslateResult { + let select = match query { + ReadQuery::RecordQuery(rq) => { + let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); + query_builder::read::get_records( + &rq.model, + ModelProjection::from(&selected_fields) + .as_columns(ctx) + .mark_all_selected(), + selected_fields.virtuals(), + rq.filter.expect("ReadOne query should always have filter set"), + ctx, + ) + .limit(1) + } + + ReadQuery::ManyRecordsQuery(mrq) => { + let selected_fields = mrq.selected_fields.without_relations().into_virtuals_last(); + + // TODO: we ignore chunking for now + query_builder::read::get_records( + &mrq.model, + ModelProjection::from(&selected_fields) + .as_columns(ctx) + .mark_all_selected(), + selected_fields.virtuals(), + mrq.args, + ctx, + ) + } + + _ => unimplemented!(), + }; + + let db_query = build_db_query(select)?; + + Ok(Expression::ReadQuery(db_query)) +} + +fn build_db_query<'a>(query: impl Into>) -> TranslateResult { + let (sql, params) = quaint::visitor::Postgres::build(query)?; + let params = 
params.into_iter().map(quaint_value_to_prisma_value).collect::>(); + Ok(DbQuery::new(sql, params)) +} + +fn quaint_value_to_prisma_value(value: quaint::Value<'_>) -> PrismaValue { + match value.typed { + quaint::ValueType::Int32(Some(i)) => PrismaValue::Int(i.into()), + quaint::ValueType::Int32(None) => PrismaValue::Null, + quaint::ValueType::Int64(Some(i)) => PrismaValue::BigInt(i), + quaint::ValueType::Int64(None) => PrismaValue::Null, + quaint::ValueType::Float(Some(f)) => PrismaValue::Float( + BigDecimal::from_f32(f) + .expect("float to decimal conversion should succeed") + .normalized(), + ), + quaint::ValueType::Float(None) => PrismaValue::Null, + quaint::ValueType::Double(Some(d)) => PrismaValue::Float( + BigDecimal::from_f64(d) + .expect("double to decimal conversion should succeed") + .normalized(), + ), + quaint::ValueType::Double(None) => PrismaValue::Null, + quaint::ValueType::Text(Some(s)) => PrismaValue::String(s.into_owned()), + quaint::ValueType::Text(None) => PrismaValue::Null, + quaint::ValueType::Enum(Some(e), _) => PrismaValue::Enum(e.into_owned()), + quaint::ValueType::Enum(None, _) => PrismaValue::Null, + quaint::ValueType::EnumArray(Some(es), _) => PrismaValue::List( + es.into_iter() + .map(|e| e.into_text()) + .map(quaint_value_to_prisma_value) + .collect(), + ), + quaint::ValueType::EnumArray(None, _) => PrismaValue::Null, + quaint::ValueType::Bytes(Some(b)) => PrismaValue::Bytes(b.into_owned()), + quaint::ValueType::Bytes(None) => PrismaValue::Null, + quaint::ValueType::Boolean(Some(b)) => PrismaValue::Boolean(b), + quaint::ValueType::Boolean(None) => PrismaValue::Null, + quaint::ValueType::Char(Some(c)) => PrismaValue::String(c.to_string()), + quaint::ValueType::Char(None) => PrismaValue::Null, + quaint::ValueType::Array(Some(a)) => { + PrismaValue::List(a.into_iter().map(quaint_value_to_prisma_value).collect()) + } + quaint::ValueType::Array(None) => PrismaValue::Null, + quaint::ValueType::Numeric(Some(bd)) => PrismaValue::Float(bd), + 
quaint::ValueType::Numeric(None) => PrismaValue::Null, + quaint::ValueType::Json(Some(j)) => PrismaValue::Json(j.to_string()), + quaint::ValueType::Json(None) => PrismaValue::Null, + quaint::ValueType::Xml(Some(x)) => PrismaValue::String(x.into_owned()), + quaint::ValueType::Xml(None) => PrismaValue::Null, + quaint::ValueType::Uuid(Some(u)) => PrismaValue::Uuid(u), + quaint::ValueType::Uuid(None) => PrismaValue::Null, + quaint::ValueType::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), + quaint::ValueType::DateTime(None) => PrismaValue::Null, + quaint::ValueType::Date(Some(d)) => { + let dt = DateTime::::from_naive_utc_and_offset(d.and_hms_opt(0, 0, 0).unwrap(), Utc); + PrismaValue::DateTime(dt.into()) + } + quaint::ValueType::Date(None) => PrismaValue::Null, + quaint::ValueType::Time(Some(t)) => { + let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); + let dt = DateTime::::from_naive_utc_and_offset(d.and_time(t), Utc); + PrismaValue::DateTime(dt.into()) + } + quaint::ValueType::Time(None) => PrismaValue::Null, + } +} From 22e73a7bee65f39ef7546fe95d65b57641ee127e Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 25 Oct 2024 20:42:34 +0200 Subject: [PATCH 08/65] Implement more queries --- .../src/database/operations/write.rs | 2 +- .../connectors/sql-query-connector/src/lib.rs | 4 +- .../src/query_arguments_ext.rs | 2 +- .../src/query_builder/write.rs | 2 +- query-engine/core/src/compiler/expression.rs | 74 +++++++++-- .../core/src/compiler/translate/query.rs | 122 ++---------------- .../src/compiler/translate/query/convert.rs | 66 ++++++++++ .../core/src/compiler/translate/query/read.rs | 74 +++++++++++ .../src/compiler/translate/query/write.rs | 57 ++++++++ 9 files changed, 280 insertions(+), 123 deletions(-) create mode 100644 query-engine/core/src/compiler/translate/query/convert.rs create mode 100644 query-engine/core/src/compiler/translate/query/read.rs create mode 100644 query-engine/core/src/compiler/translate/query/write.rs diff --git 
a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index 19a52fa94fbf..3df576c3fe03 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -207,7 +207,7 @@ fn collect_affected_fields(args: &[WriteArgs], model: &Model) -> HashSet, skip_duplicates: bool, diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index a5403eefd998..dc809c7bf627 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -12,7 +12,7 @@ mod limit; pub mod model_extensions; mod nested_aggregations; mod ordering; -mod query_arguments_ext; +pub mod query_arguments_ext; pub mod query_builder; mod query_ext; mod row; @@ -23,6 +23,8 @@ mod value; use self::{column_metadata::*, context::Context, query_ext::QueryExt, row::*}; use quaint::prelude::Queryable; +pub use database::operations::write::generate_insert_statements; + pub use database::FromSource; #[cfg(feature = "driver-adapters")] pub use database::Js; diff --git a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs b/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs index 33db6ff17676..1e2aebb3535e 100644 --- a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs @@ -1,6 +1,6 @@ use query_structure::QueryArguments; -pub(crate) trait QueryArgumentsExt { +pub trait QueryArgumentsExt { /// If we need to take rows before a cursor position, then we need to reverse the order in SQL. 
fn needs_reversed_order(&self) -> bool; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index 2f4ab525e845..5cfbd8002fe0 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -7,7 +7,7 @@ use std::{collections::HashSet, convert::TryInto}; /// `INSERT` a new record to the database. Resulting an `INSERT` ast and an /// optional `RecordProjection` if available from the arguments or model. -pub(crate) fn create_record( +pub fn create_record( model: &Model, mut args: WriteArgs, selected_fields: &ModelProjection, diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index d9329128db54..a1fda58c7b5f 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -6,6 +6,12 @@ pub struct Binding { pub expr: Expression, } +impl Binding { + pub fn new(name: String, expr: Expression) -> Self { + Self { name, expr } + } +} + impl std::fmt::Display for Binding { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{} = {}", self.name, self.expr) @@ -26,19 +32,35 @@ impl DbQuery { #[derive(Debug)] pub enum Expression { + /// Sequence of statements. Seq(Vec), - Get { - name: String, - }, + + /// Get binding value. + Get { name: String }, + + /// A lexical scope with let-bindings. Let { bindings: Vec, expr: Box, }, - GetFirstNonEmpty { - names: Vec, - }, - ReadQuery(DbQuery), - WriteQuery(DbQuery), + + /// Gets the first non-empty value from a list of bindings. + GetFirstNonEmpty { names: Vec }, + + /// A database query that returns data. + Query(DbQuery), + + /// A database query that returns the number of affected rows. + Execute(DbQuery), + + /// Reverses the result of an expression in memory. 
+ Reverse(Box), + + /// Sums a list of scalars returned by the expressions. + Sum(Vec), + + /// Concatenates a list of lists. + Concat(Vec), } impl Expression { @@ -54,9 +76,11 @@ impl Expression { } write!(f, "{indent}}}")?; } + Self::Get { name } => { write!(f, "{indent}get {name}")?; } + Self::Let { bindings, expr } => { writeln!(f, "{indent}let")?; for Binding { name, expr } in bindings { @@ -67,15 +91,29 @@ impl Expression { writeln!(f, "{indent}in")?; expr.display(f, level + 1)?; } + Self::GetFirstNonEmpty { names } => { write!(f, "{indent}getFirstNonEmpty")?; for name in names { write!(f, " {}", name)?; } } - Self::ReadQuery(query) => self.display_query("readQuery", query, f, level)?, - Self::WriteQuery(query) => self.display_query("writeQuery", query, f, level)?, + + Self::Query(query) => self.display_query("query", query, f, level)?, + + Self::Execute(query) => self.display_query("execute", query, f, level)?, + + Self::Reverse(expr) => { + writeln!(f, "{indent}reverse (")?; + expr.display(f, level + 1)?; + write!(f, "{indent})")?; + } + + Self::Sum(exprs) => self.display_function("sum", exprs, f, level)?, + + Self::Concat(exprs) => self.display_function("concat", exprs, f, level)?, } + Ok(()) } @@ -90,6 +128,22 @@ impl Expression { let DbQuery { query, params } = db_query; write!(f, "{indent}{op} {{\n{indent} {query}\n{indent}}} with {params:?}") } + + fn display_function( + &self, + name: &str, + args: &[Expression], + f: &mut std::fmt::Formatter<'_>, + level: usize, + ) -> std::fmt::Result { + let indent = " ".repeat(level); + write!(f, "{indent}{name} (")?; + for arg in args { + arg.display(f, level + 1)?; + writeln!(f, ",")?; + } + write!(f, ")") + } } impl std::fmt::Display for Expression { diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index fb69274461ce..f3ff82c95298 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ 
b/query-engine/core/src/compiler/translate/query.rs @@ -1,15 +1,18 @@ -use bigdecimal::{BigDecimal, FromPrimitive}; -use chrono::{DateTime, NaiveDate, Utc}; +mod convert; +mod read; +mod write; + use quaint::{ prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, visitor::Visitor, }; -use query_structure::{ModelProjection, PrismaValue}; -use sql_query_connector::{context::Context, model_extensions::AsColumns, query_builder}; +use read::translate_read_query; +use sql_query_connector::context::Context; +use write::translate_write_query; use crate::{ compiler::expression::{DbQuery, Expression}, - Query, ReadQuery, + Query, }; use super::TranslateResult; @@ -25,114 +28,15 @@ pub(crate) fn translate_query(query: Query) -> TranslateResult { match query { Query::Read(rq) => translate_read_query(rq, &ctx), - _ => unimplemented!(), + Query::Write(wq) => translate_write_query(wq, &ctx), } } -fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> TranslateResult { - let select = match query { - ReadQuery::RecordQuery(rq) => { - let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); - query_builder::read::get_records( - &rq.model, - ModelProjection::from(&selected_fields) - .as_columns(ctx) - .mark_all_selected(), - selected_fields.virtuals(), - rq.filter.expect("ReadOne query should always have filter set"), - ctx, - ) - .limit(1) - } - - ReadQuery::ManyRecordsQuery(mrq) => { - let selected_fields = mrq.selected_fields.without_relations().into_virtuals_last(); - - // TODO: we ignore chunking for now - query_builder::read::get_records( - &mrq.model, - ModelProjection::from(&selected_fields) - .as_columns(ctx) - .mark_all_selected(), - selected_fields.virtuals(), - mrq.args, - ctx, - ) - } - - _ => unimplemented!(), - }; - - let db_query = build_db_query(select)?; - - Ok(Expression::ReadQuery(db_query)) -} - fn build_db_query<'a>(query: impl Into>) -> TranslateResult { let (sql, params) = quaint::visitor::Postgres::build(query)?; - 
let params = params.into_iter().map(quaint_value_to_prisma_value).collect::>(); + let params = params + .into_iter() + .map(convert::quaint_value_to_prisma_value) + .collect::>(); Ok(DbQuery::new(sql, params)) } - -fn quaint_value_to_prisma_value(value: quaint::Value<'_>) -> PrismaValue { - match value.typed { - quaint::ValueType::Int32(Some(i)) => PrismaValue::Int(i.into()), - quaint::ValueType::Int32(None) => PrismaValue::Null, - quaint::ValueType::Int64(Some(i)) => PrismaValue::BigInt(i), - quaint::ValueType::Int64(None) => PrismaValue::Null, - quaint::ValueType::Float(Some(f)) => PrismaValue::Float( - BigDecimal::from_f32(f) - .expect("float to decimal conversion should succeed") - .normalized(), - ), - quaint::ValueType::Float(None) => PrismaValue::Null, - quaint::ValueType::Double(Some(d)) => PrismaValue::Float( - BigDecimal::from_f64(d) - .expect("double to decimal conversion should succeed") - .normalized(), - ), - quaint::ValueType::Double(None) => PrismaValue::Null, - quaint::ValueType::Text(Some(s)) => PrismaValue::String(s.into_owned()), - quaint::ValueType::Text(None) => PrismaValue::Null, - quaint::ValueType::Enum(Some(e), _) => PrismaValue::Enum(e.into_owned()), - quaint::ValueType::Enum(None, _) => PrismaValue::Null, - quaint::ValueType::EnumArray(Some(es), _) => PrismaValue::List( - es.into_iter() - .map(|e| e.into_text()) - .map(quaint_value_to_prisma_value) - .collect(), - ), - quaint::ValueType::EnumArray(None, _) => PrismaValue::Null, - quaint::ValueType::Bytes(Some(b)) => PrismaValue::Bytes(b.into_owned()), - quaint::ValueType::Bytes(None) => PrismaValue::Null, - quaint::ValueType::Boolean(Some(b)) => PrismaValue::Boolean(b), - quaint::ValueType::Boolean(None) => PrismaValue::Null, - quaint::ValueType::Char(Some(c)) => PrismaValue::String(c.to_string()), - quaint::ValueType::Char(None) => PrismaValue::Null, - quaint::ValueType::Array(Some(a)) => { - PrismaValue::List(a.into_iter().map(quaint_value_to_prisma_value).collect()) - } - 
quaint::ValueType::Array(None) => PrismaValue::Null, - quaint::ValueType::Numeric(Some(bd)) => PrismaValue::Float(bd), - quaint::ValueType::Numeric(None) => PrismaValue::Null, - quaint::ValueType::Json(Some(j)) => PrismaValue::Json(j.to_string()), - quaint::ValueType::Json(None) => PrismaValue::Null, - quaint::ValueType::Xml(Some(x)) => PrismaValue::String(x.into_owned()), - quaint::ValueType::Xml(None) => PrismaValue::Null, - quaint::ValueType::Uuid(Some(u)) => PrismaValue::Uuid(u), - quaint::ValueType::Uuid(None) => PrismaValue::Null, - quaint::ValueType::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), - quaint::ValueType::DateTime(None) => PrismaValue::Null, - quaint::ValueType::Date(Some(d)) => { - let dt = DateTime::::from_naive_utc_and_offset(d.and_hms_opt(0, 0, 0).unwrap(), Utc); - PrismaValue::DateTime(dt.into()) - } - quaint::ValueType::Date(None) => PrismaValue::Null, - quaint::ValueType::Time(Some(t)) => { - let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); - let dt = DateTime::::from_naive_utc_and_offset(d.and_time(t), Utc); - PrismaValue::DateTime(dt.into()) - } - quaint::ValueType::Time(None) => PrismaValue::Null, - } -} diff --git a/query-engine/core/src/compiler/translate/query/convert.rs b/query-engine/core/src/compiler/translate/query/convert.rs new file mode 100644 index 000000000000..d4c678035fc0 --- /dev/null +++ b/query-engine/core/src/compiler/translate/query/convert.rs @@ -0,0 +1,66 @@ +use bigdecimal::{BigDecimal, FromPrimitive}; +use chrono::{DateTime, NaiveDate, Utc}; +use query_structure::PrismaValue; + +pub(crate) fn quaint_value_to_prisma_value(value: quaint::Value<'_>) -> PrismaValue { + match value.typed { + quaint::ValueType::Int32(Some(i)) => PrismaValue::Int(i.into()), + quaint::ValueType::Int32(None) => PrismaValue::Null, + quaint::ValueType::Int64(Some(i)) => PrismaValue::BigInt(i), + quaint::ValueType::Int64(None) => PrismaValue::Null, + quaint::ValueType::Float(Some(f)) => PrismaValue::Float( + 
BigDecimal::from_f32(f) + .expect("float to decimal conversion should succeed") + .normalized(), + ), + quaint::ValueType::Float(None) => PrismaValue::Null, + quaint::ValueType::Double(Some(d)) => PrismaValue::Float( + BigDecimal::from_f64(d) + .expect("double to decimal conversion should succeed") + .normalized(), + ), + quaint::ValueType::Double(None) => PrismaValue::Null, + quaint::ValueType::Text(Some(s)) => PrismaValue::String(s.into_owned()), + quaint::ValueType::Text(None) => PrismaValue::Null, + quaint::ValueType::Enum(Some(e), _) => PrismaValue::Enum(e.into_owned()), + quaint::ValueType::Enum(None, _) => PrismaValue::Null, + quaint::ValueType::EnumArray(Some(es), _) => PrismaValue::List( + es.into_iter() + .map(|e| e.into_text()) + .map(quaint_value_to_prisma_value) + .collect(), + ), + quaint::ValueType::EnumArray(None, _) => PrismaValue::Null, + quaint::ValueType::Bytes(Some(b)) => PrismaValue::Bytes(b.into_owned()), + quaint::ValueType::Bytes(None) => PrismaValue::Null, + quaint::ValueType::Boolean(Some(b)) => PrismaValue::Boolean(b), + quaint::ValueType::Boolean(None) => PrismaValue::Null, + quaint::ValueType::Char(Some(c)) => PrismaValue::String(c.to_string()), + quaint::ValueType::Char(None) => PrismaValue::Null, + quaint::ValueType::Array(Some(a)) => { + PrismaValue::List(a.into_iter().map(quaint_value_to_prisma_value).collect()) + } + quaint::ValueType::Array(None) => PrismaValue::Null, + quaint::ValueType::Numeric(Some(bd)) => PrismaValue::Float(bd), + quaint::ValueType::Numeric(None) => PrismaValue::Null, + quaint::ValueType::Json(Some(j)) => PrismaValue::Json(j.to_string()), + quaint::ValueType::Json(None) => PrismaValue::Null, + quaint::ValueType::Xml(Some(x)) => PrismaValue::String(x.into_owned()), + quaint::ValueType::Xml(None) => PrismaValue::Null, + quaint::ValueType::Uuid(Some(u)) => PrismaValue::Uuid(u), + quaint::ValueType::Uuid(None) => PrismaValue::Null, + quaint::ValueType::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), + 
quaint::ValueType::DateTime(None) => PrismaValue::Null, + quaint::ValueType::Date(Some(d)) => { + let dt = DateTime::::from_naive_utc_and_offset(d.and_hms_opt(0, 0, 0).unwrap(), Utc); + PrismaValue::DateTime(dt.into()) + } + quaint::ValueType::Date(None) => PrismaValue::Null, + quaint::ValueType::Time(Some(t)) => { + let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); + let dt = DateTime::::from_naive_utc_and_offset(d.and_time(t), Utc); + PrismaValue::DateTime(dt.into()) + } + quaint::ValueType::Time(None) => PrismaValue::Null, + } +} diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs new file mode 100644 index 000000000000..cabf9421e250 --- /dev/null +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -0,0 +1,74 @@ +use query_structure::ModelProjection; +use sql_query_connector::{ + context::Context, model_extensions::AsColumns, query_arguments_ext::QueryArgumentsExt, query_builder, +}; + +use crate::{ + compiler::{expression::Expression, translate::TranslateResult}, + ReadQuery, RelatedRecordsQuery, +}; + +use super::build_db_query; + +pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> TranslateResult { + Ok(match query { + ReadQuery::RecordQuery(rq) => { + let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); + + let query = query_builder::read::get_records( + &rq.model, + ModelProjection::from(&selected_fields) + .as_columns(ctx) + .mark_all_selected(), + selected_fields.virtuals(), + rq.filter.expect("ReadOne query should always have filter set"), + ctx, + ) + .limit(1); + + Expression::Query(build_db_query(query)?) 
+ } + + ReadQuery::ManyRecordsQuery(mrq) => { + let selected_fields = mrq.selected_fields.without_relations().into_virtuals_last(); + let needs_reversed_order = mrq.args.needs_reversed_order(); + + // TODO: we ignore chunking for now + let query = query_builder::read::get_records( + &mrq.model, + ModelProjection::from(&selected_fields) + .as_columns(ctx) + .mark_all_selected(), + selected_fields.virtuals(), + mrq.args, + ctx, + ); + + let expr = Expression::Query(build_db_query(query)?); + + if needs_reversed_order { + Expression::Reverse(Box::new(expr)) + } else { + expr + } + } + + ReadQuery::RelatedRecordsQuery(rrq) => { + if rrq.parent_field.relation().is_many_to_many() { + build_read_m2m_query(rrq, ctx)? + } else { + build_read_one2m_query(rrq, ctx)? + } + } + + _ => unimplemented!(), + }) +} + +fn build_read_m2m_query(query: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { + todo!() +} + +fn build_read_one2m_query(query: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { + todo!() +} diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-engine/core/src/compiler/translate/query/write.rs new file mode 100644 index 000000000000..a3a39f2372da --- /dev/null +++ b/query-engine/core/src/compiler/translate/query/write.rs @@ -0,0 +1,57 @@ +use query_structure::ModelProjection; +use sql_query_connector::{context::Context, generate_insert_statements, query_builder}; + +use crate::{ + compiler::{expression::Expression, translate::TranslateResult}, + WriteQuery, +}; + +use super::build_db_query; + +pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> TranslateResult { + Ok(match query { + WriteQuery::CreateRecord(cr) => { + // TODO: MySQL needs additional logic to generate IDs on our side. 
+ // See sql_query_connector::database::operations::write::create_record + let query = query_builder::write::create_record( + &cr.model, + cr.args, + &ModelProjection::from(&cr.selected_fields), + ctx, + ); + + // TODO: we probably need some additional node type or extra info in the WriteQuery node + // to help the client executor figure out the returned ID in the case when it's inferred + // from the query arguments. + Expression::Execute(build_db_query(query)?) + } + + WriteQuery::CreateManyRecords(cmr) => { + if let Some(selected_fields) = cmr.selected_fields { + Expression::Concat( + generate_insert_statements( + &cmr.model, + cmr.args, + cmr.skip_duplicates, + Some(&selected_fields.fields.into()), + ctx, + ) + .into_iter() + .map(build_db_query) + .map(|maybe_db_query| maybe_db_query.map(Expression::Execute)) + .collect::>>()?, + ) + } else { + Expression::Sum( + generate_insert_statements(&cmr.model, cmr.args, cmr.skip_duplicates, None, ctx) + .into_iter() + .map(build_db_query) + .map(|maybe_db_query| maybe_db_query.map(Expression::Execute)) + .collect::>>()?, + ) + } + } + + _ => todo!(), + }) +} From ff2390e136353ca823901db9dd62d6ab6b414d6a Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 03:23:55 +0200 Subject: [PATCH 09/65] Implement placeholders --- libs/prisma-value/src/lib.rs | 73 ++++++++++++- quaint/src/ast.rs | 2 +- quaint/src/ast/values.rs | 66 +++++++++++ quaint/src/connector/column_type.rs | 57 +++++++--- .../src/connector/mssql/native/conversion.rs | 11 +- .../src/connector/mysql/native/conversion.rs | 4 + .../connector/postgres/native/conversion.rs | 103 ++++++++++++------ .../src/connector/sqlite/native/conversion.rs | 4 + quaint/src/error/mod.rs | 6 + quaint/src/visitor/mssql.rs | 4 + quaint/src/visitor/mysql.rs | 4 + quaint/src/visitor/postgres.rs | 5 + quaint/src/visitor/sqlite.rs | 4 + .../sql-query-connector/src/error.rs | 2 + .../src/model_extensions/scalar_field.rs | 37 ++++++- .../sql-query-connector/src/ser_raw.rs 
| 1 + .../sql-query-connector/src/value.rs | 28 +++++ .../src/compiler/translate/query/convert.rs | 30 ++++- query-engine/core/src/constants.rs | 1 + .../core/src/query_document/parser.rs | 5 + .../src/ast_builders/datamodel_ast_builder.rs | 1 + .../src/conversion/js_arg_type.rs | 1 + .../query-engine/examples/compiler.rs | 14 ++- .../src/protocols/json/protocol_adapter.rs | 64 ++++++----- 24 files changed, 440 insertions(+), 87 deletions(-) diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index f6dcede81da7..3551a45cdcdb 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -47,10 +47,45 @@ pub enum PrismaValue { #[serde(serialize_with = "serialize_bytes")] Bytes(Vec), - // Placeholder { - // name: String, - // r#type: Type, - // } + + #[serde(serialize_with = "serialize_placeholder")] + Placeholder { + name: String, + r#type: PlaceholderType, + }, +} + +#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)] +pub enum PlaceholderType { + Any, + String, + Int, + BigInt, + Float, + Boolean, + Decimal, + Date, + Array(Box), + Object, + Bytes, +} + +impl std::fmt::Display for PlaceholderType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + PlaceholderType::Any => write!(f, "Any"), + PlaceholderType::String => write!(f, "String"), + PlaceholderType::Int => write!(f, "Int"), + PlaceholderType::BigInt => write!(f, "BigInt"), + PlaceholderType::Float => write!(f, "Float"), + PlaceholderType::Boolean => write!(f, "Boolean"), + PlaceholderType::Decimal => write!(f, "Decimal"), + PlaceholderType::Date => write!(f, "Date"), + PlaceholderType::Array(t) => write!(f, "Array<{}>", t), + PlaceholderType::Object => write!(f, "Object"), + PlaceholderType::Bytes => write!(f, "Bytes"), + } + } } /// Stringify a date to the following format @@ -111,6 +146,7 @@ impl TryFrom for PrismaValue { Ok(PrismaValue::DateTime(date)) } + Some("bigint") => { let value = obj 
.get("prisma__value") @@ -121,6 +157,7 @@ impl TryFrom for PrismaValue { .map(PrismaValue::BigInt) .map_err(|_| ConversionFailure::new("JSON bigint value", "PrismaValue")) } + Some("decimal") => { let value = obj .get("prisma__value") @@ -131,6 +168,7 @@ impl TryFrom for PrismaValue { .map(PrismaValue::Float) .map_err(|_| ConversionFailure::new("JSON decimal value", "PrismaValue")) } + Some("bytes") => { let value = obj .get("prisma__value") @@ -140,6 +178,19 @@ impl TryFrom for PrismaValue { decode_bytes(value).map(PrismaValue::Bytes) } + Some("param") => { + let name = obj + .get("prisma__value") + .and_then(|v| v.as_str()) + .ok_or_else(|| ConversionFailure::new("JSON param value", "PrismaValue"))? + .to_owned(); + + Ok(PrismaValue::Placeholder { + name, + r#type: PlaceholderType::Any, + }) + } + _ => Ok(PrismaValue::Json(serde_json::to_string(&obj).unwrap())), }, } @@ -201,6 +252,19 @@ where map.end() } +fn serialize_placeholder(name: &str, r#type: &PlaceholderType, serializer: S) -> Result +where + S: Serializer, +{ + let mut map = serializer.serialize_map(Some(3))?; + + map.serialize_entry("prisma__type", "param")?; + map.serialize_entry("prisma__value", name)?; + map.serialize_entry("prisma__paramType", r#type)?; + + map.end() +} + struct BigDecimalVisitor; impl serde::de::Visitor<'_> for BigDecimalVisitor { @@ -349,6 +413,7 @@ impl fmt::Display for PrismaValue { write!(f, "{{ {joined} }}") } + PrismaValue::Placeholder { name, r#type } => write!(f, "var({name}: {type})"), } } } diff --git a/quaint/src/ast.rs b/quaint/src/ast.rs index 66d37a5754a7..50aa38cc4d1f 100644 --- a/quaint/src/ast.rs +++ b/quaint/src/ast.rs @@ -53,5 +53,5 @@ pub use select::{DistinctType, Select}; pub use table::*; pub use union::Union; pub use update::*; -pub use values::{IntoRaw, Raw, Value, ValueType, Values}; +pub use values::{IntoRaw, Raw, Value, ValueType, Values, VarType}; pub(crate) use values::{NativeColumnType, Params}; diff --git a/quaint/src/ast/values.rs 
b/quaint/src/ast/values.rs index 008191150618..256479fce6c1 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -225,6 +225,11 @@ impl<'a> Value<'a> { ValueType::xml(value).into_value() } + /// Creates a new variable. + pub fn var(name: impl Into>, ty: VarType) -> Self { + ValueType::var(name, ty).into_value() + } + /// `true` if the `Value` is null. pub fn is_null(&self) -> bool { self.typed.is_null() @@ -553,6 +558,59 @@ pub enum ValueType<'a> { Date(Option), /// A time value. Time(Option), + /// A variable that doesn't have a value assigned yet. + Var(Cow<'a, str>, VarType), +} + +#[derive(Debug, Clone, PartialEq)] +pub enum VarType { + Unknown, + Int32, + Int64, + Float, + Double, + Text, + Enum, + Bytes, + Boolean, + Char, + Array(Box), + Numeric, + Json, + Xml, + Uuid, + DateTime, + Date, + Time, +} + +impl fmt::Display for VarType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + VarType::Unknown => write!(f, "Unknown"), + VarType::Int32 => write!(f, "Int32"), + VarType::Int64 => write!(f, "Int64"), + VarType::Float => write!(f, "Float"), + VarType::Double => write!(f, "Double"), + VarType::Text => write!(f, "Text"), + VarType::Enum => write!(f, "Enum"), + VarType::Bytes => write!(f, "Bytes"), + VarType::Boolean => write!(f, "Boolean"), + VarType::Char => write!(f, "Char"), + VarType::Array(t) => { + write!(f, "Array<")?; + t.fmt(f)?; + write!(f, ">") + } + VarType::Numeric => write!(f, "Numeric"), + VarType::Json => write!(f, "Json"), + VarType::Xml => write!(f, "Xml"), + VarType::Uuid => write!(f, "Uuid"), + VarType::DateTime => write!(f, "DateTime"), + VarType::Date => write!(f, "Date"), + VarType::Time => write!(f, "Time"), + } + } } pub(crate) struct Params<'a>(pub(crate) &'a [Value<'a>]); @@ -619,6 +677,7 @@ impl fmt::Display for ValueType<'_> { ValueType::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), ValueType::Date(val) => val.map(|v| write!(f, "\"{v}\"")), ValueType::Time(val) => val.map(|v| 
write!(f, "\"{v}\"")), + ValueType::Var(name, ty) => Some(write!(f, "${name} as {ty}")), }; match res { @@ -677,6 +736,7 @@ impl<'a> From> for serde_json::Value { ValueType::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), ValueType::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), ValueType::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), + ValueType::Var(_, _) => todo!(), }; match res { @@ -830,6 +890,11 @@ impl<'a> ValueType<'a> { Self::Xml(Some(value.into())) } + /// Creates a new variable. + pub fn var(name: impl Into>, ty: VarType) -> Self { + Self::Var(name.into(), ty) + } + /// `true` if the `Value` is null. pub fn is_null(&self) -> bool { match self { @@ -851,6 +916,7 @@ impl<'a> ValueType<'a> { Self::Date(d) => d.is_none(), Self::Time(t) => t.is_none(), Self::Json(json) => json.is_none(), + Self::Var(_, _) => false, } } diff --git a/quaint/src/connector/column_type.rs b/quaint/src/connector/column_type.rs index 38fb3d786dc0..d8cd9d46d19f 100644 --- a/quaint/src/connector/column_type.rs +++ b/quaint/src/connector/column_type.rs @@ -1,7 +1,7 @@ #[cfg(not(target_arch = "wasm32"))] use super::TypeIdentifier; -use crate::{Value, ValueType}; +use crate::{ast::VarType, Value, ValueType}; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ColumnType { @@ -99,23 +99,24 @@ impl From<&Value<'_>> for ColumnType { impl From<&ValueType<'_>> for ColumnType { fn from(value: &ValueType) -> Self { match value { - ValueType::Int32(_) => ColumnType::Int32, - ValueType::Int64(_) => ColumnType::Int64, - ValueType::Float(_) => ColumnType::Float, - ValueType::Double(_) => ColumnType::Double, - ValueType::Text(_) => ColumnType::Text, - ValueType::Enum(_, _) => ColumnType::Enum, + ValueType::Int32(_) | ValueType::Var(_, VarType::Int32) => ColumnType::Int32, + ValueType::Int64(_) | ValueType::Var(_, VarType::Int64) => ColumnType::Int64, + ValueType::Float(_) | ValueType::Var(_, VarType::Float) => 
ColumnType::Float, + ValueType::Double(_) | ValueType::Var(_, VarType::Double) => ColumnType::Double, + ValueType::Text(_) | ValueType::Var(_, VarType::Text) => ColumnType::Text, + ValueType::Enum(_, _) | ValueType::Var(_, VarType::Enum) => ColumnType::Enum, ValueType::EnumArray(_, _) => ColumnType::TextArray, - ValueType::Bytes(_) => ColumnType::Bytes, - ValueType::Boolean(_) => ColumnType::Boolean, - ValueType::Char(_) => ColumnType::Char, - ValueType::Numeric(_) => ColumnType::Numeric, - ValueType::Json(_) => ColumnType::Json, - ValueType::Xml(_) => ColumnType::Xml, - ValueType::Uuid(_) => ColumnType::Uuid, - ValueType::DateTime(_) => ColumnType::DateTime, - ValueType::Date(_) => ColumnType::Date, - ValueType::Time(_) => ColumnType::Time, + ValueType::Var(_, VarType::Array(vt)) if **vt == VarType::Enum => ColumnType::TextArray, + ValueType::Bytes(_) | ValueType::Var(_, VarType::Bytes) => ColumnType::Bytes, + ValueType::Boolean(_) | ValueType::Var(_, VarType::Boolean) => ColumnType::Boolean, + ValueType::Char(_) | ValueType::Var(_, VarType::Char) => ColumnType::Char, + ValueType::Numeric(_) | ValueType::Var(_, VarType::Numeric) => ColumnType::Numeric, + ValueType::Json(_) | ValueType::Var(_, VarType::Json) => ColumnType::Json, + ValueType::Xml(_) | ValueType::Var(_, VarType::Xml) => ColumnType::Xml, + ValueType::Uuid(_) | ValueType::Var(_, VarType::Uuid) => ColumnType::Uuid, + ValueType::DateTime(_) | ValueType::Var(_, VarType::DateTime) => ColumnType::DateTime, + ValueType::Date(_) | ValueType::Var(_, VarType::Date) => ColumnType::Date, + ValueType::Time(_) | ValueType::Var(_, VarType::Time) => ColumnType::Time, ValueType::Array(Some(vals)) if !vals.is_empty() => match &vals[0].typed { ValueType::Int32(_) => ColumnType::Int32Array, ValueType::Int64(_) => ColumnType::Int64Array, @@ -135,8 +136,30 @@ impl From<&ValueType<'_>> for ColumnType { ValueType::Time(_) => ColumnType::TimeArray, ValueType::Array(_) => ColumnType::Unknown, ValueType::EnumArray(_, _) => 
ColumnType::Unknown, + ValueType::Var(_, _) => ColumnType::Unknown, }, ValueType::Array(_) => ColumnType::Unknown, + ValueType::Var(_, VarType::Unknown) => ColumnType::Unknown, + ValueType::Var(_, VarType::Array(vt)) => match **vt { + VarType::Int32 => ColumnType::Int32Array, + VarType::Int64 => ColumnType::Int64Array, + VarType::Float => ColumnType::FloatArray, + VarType::Double => ColumnType::DoubleArray, + VarType::Text => ColumnType::TextArray, + VarType::Enum => ColumnType::TextArray, + VarType::Bytes => ColumnType::BytesArray, + VarType::Boolean => ColumnType::BooleanArray, + VarType::Char => ColumnType::CharArray, + VarType::Numeric => ColumnType::NumericArray, + VarType::Json => ColumnType::JsonArray, + VarType::Xml => ColumnType::TextArray, + VarType::Uuid => ColumnType::UuidArray, + VarType::DateTime => ColumnType::DateTimeArray, + VarType::Date => ColumnType::DateArray, + VarType::Time => ColumnType::TimeArray, + VarType::Unknown => ColumnType::Unknown, + VarType::Array(_) => ColumnType::Unknown, + }, } } } diff --git a/quaint/src/connector/mssql/native/conversion.rs b/quaint/src/connector/mssql/native/conversion.rs index 5d2eb2eb08b8..1ab099946252 100644 --- a/quaint/src/connector/mssql/native/conversion.rs +++ b/quaint/src/connector/mssql/native/conversion.rs @@ -1,4 +1,7 @@ -use crate::ast::{Value, ValueType}; +use crate::{ + ast::{Value, ValueType}, + error::{Error, ErrorKind}, +}; use bigdecimal::BigDecimal; use std::{borrow::Cow, convert::TryFrom}; @@ -25,6 +28,12 @@ impl<'a> IntoSql<'a> for &'a Value<'a> { ValueType::DateTime(val) => val.into_sql(), ValueType::Date(val) => val.into_sql(), ValueType::Time(val) => val.into_sql(), + ValueType::Var(name, _) => { + panic!( + "conversion error: {:?}", + Error::builder(ErrorKind::RanQueryWithVarParam(name.clone().into_owned())).build() + ) + } } } } diff --git a/quaint/src/connector/mysql/native/conversion.rs b/quaint/src/connector/mysql/native/conversion.rs index 1a2d065f03af..540ffb8b4df0 100644 --- 
a/quaint/src/connector/mysql/native/conversion.rs +++ b/quaint/src/connector/mysql/native/conversion.rs @@ -68,6 +68,10 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { dt.timestamp_subsec_micros(), ) }), + + ValueType::Var(name, _) => { + Err(Error::builder(ErrorKind::RanQueryWithVarParam(name.clone().into_owned())).build())? + } }; match res { diff --git a/quaint/src/connector/postgres/native/conversion.rs b/quaint/src/connector/postgres/native/conversion.rs index a55e6490bd86..c67cc8384b63 100644 --- a/quaint/src/connector/postgres/native/conversion.rs +++ b/quaint/src/connector/postgres/native/conversion.rs @@ -1,7 +1,7 @@ mod decimal; use crate::{ - ast::{Value, ValueType}, + ast::{Value, ValueType, VarType}, connector::queryable::{GetRow, ToColumnNames}, error::{Error, ErrorKind}, prelude::EnumVariant, @@ -40,23 +40,27 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { } match &p.typed { - ValueType::Int32(_) => PostgresType::INT4, - ValueType::Int64(_) => PostgresType::INT8, - ValueType::Float(_) => PostgresType::FLOAT4, - ValueType::Double(_) => PostgresType::FLOAT8, - ValueType::Text(_) => PostgresType::TEXT, + ValueType::Int32(_) | ValueType::Var(_, VarType::Int32) => PostgresType::INT4, + ValueType::Int64(_) | ValueType::Var(_, VarType::Int64) => PostgresType::INT8, + ValueType::Float(_) | ValueType::Var(_, VarType::Float) => PostgresType::FLOAT4, + ValueType::Double(_) | ValueType::Var(_, VarType::Double) => PostgresType::FLOAT8, + ValueType::Text(_) | ValueType::Var(_, VarType::Text) => PostgresType::TEXT, // Enums are user-defined types, we can't statically infer them, so we let PG infer it - ValueType::Enum(_, _) | ValueType::EnumArray(_, _) => PostgresType::UNKNOWN, - ValueType::Bytes(_) => PostgresType::BYTEA, - ValueType::Boolean(_) => PostgresType::BOOL, - ValueType::Char(_) => PostgresType::CHAR, - ValueType::Numeric(_) => PostgresType::NUMERIC, - ValueType::Json(_) => PostgresType::JSONB, - ValueType::Xml(_) => 
PostgresType::XML, - ValueType::Uuid(_) => PostgresType::UUID, - ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ, - ValueType::Date(_) => PostgresType::TIMESTAMP, - ValueType::Time(_) => PostgresType::TIME, + ValueType::Enum(_, _) | ValueType::EnumArray(_, _) | ValueType::Var(_, VarType::Enum) => { + PostgresType::UNKNOWN + } + ValueType::Bytes(_) | ValueType::Var(_, VarType::Bytes) => PostgresType::BYTEA, + ValueType::Boolean(_) | ValueType::Var(_, VarType::Boolean) => PostgresType::BOOL, + ValueType::Char(_) | ValueType::Var(_, VarType::Char) => PostgresType::CHAR, + ValueType::Numeric(_) | ValueType::Var(_, VarType::Numeric) => PostgresType::NUMERIC, + ValueType::Json(_) | ValueType::Var(_, VarType::Json) => PostgresType::JSONB, + ValueType::Xml(_) | ValueType::Var(_, VarType::Xml) => PostgresType::XML, + ValueType::Uuid(_) | ValueType::Var(_, VarType::Uuid) => PostgresType::UUID, + ValueType::DateTime(_) | ValueType::Var(_, VarType::DateTime) => PostgresType::TIMESTAMPTZ, + ValueType::Date(_) | ValueType::Var(_, VarType::Date) => PostgresType::TIMESTAMP, + ValueType::Time(_) | ValueType::Var(_, VarType::Time) => PostgresType::TIME, + ValueType::Var(_, VarType::Unknown) => PostgresType::UNKNOWN, + ValueType::Array(ref arr) => { let arr = arr.as_ref().unwrap(); @@ -76,27 +80,53 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { } match first.typed { - ValueType::Int32(_) => PostgresType::INT4_ARRAY, - ValueType::Int64(_) => PostgresType::INT8_ARRAY, - ValueType::Float(_) => PostgresType::FLOAT4_ARRAY, - ValueType::Double(_) => PostgresType::FLOAT8_ARRAY, - ValueType::Text(_) => PostgresType::TEXT_ARRAY, + ValueType::Int32(_) | ValueType::Var(_, VarType::Int32) => PostgresType::INT4_ARRAY, + ValueType::Int64(_) | ValueType::Var(_, VarType::Int64) => PostgresType::INT8_ARRAY, + ValueType::Float(_) | ValueType::Var(_, VarType::Float) => PostgresType::FLOAT4_ARRAY, + ValueType::Double(_) | ValueType::Var(_, VarType::Double) => 
PostgresType::FLOAT8_ARRAY, + ValueType::Text(_) | ValueType::Var(_, VarType::Text) => PostgresType::TEXT_ARRAY, // Enums are special types, we can't statically infer them, so we let PG infer it - ValueType::Enum(_, _) | ValueType::EnumArray(_, _) => PostgresType::UNKNOWN, - ValueType::Bytes(_) => PostgresType::BYTEA_ARRAY, - ValueType::Boolean(_) => PostgresType::BOOL_ARRAY, - ValueType::Char(_) => PostgresType::CHAR_ARRAY, - ValueType::Numeric(_) => PostgresType::NUMERIC_ARRAY, - ValueType::Json(_) => PostgresType::JSONB_ARRAY, - ValueType::Xml(_) => PostgresType::XML_ARRAY, - ValueType::Uuid(_) => PostgresType::UUID_ARRAY, - ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, - ValueType::Date(_) => PostgresType::TIMESTAMP_ARRAY, - ValueType::Time(_) => PostgresType::TIME_ARRAY, + ValueType::Enum(_, _) | ValueType::EnumArray(_, _) | ValueType::Var(_, VarType::Enum) => { + PostgresType::UNKNOWN + } + ValueType::Bytes(_) | ValueType::Var(_, VarType::Bytes) => PostgresType::BYTEA_ARRAY, + ValueType::Boolean(_) | ValueType::Var(_, VarType::Boolean) => PostgresType::BOOL_ARRAY, + ValueType::Char(_) | ValueType::Var(_, VarType::Char) => PostgresType::CHAR_ARRAY, + ValueType::Numeric(_) | ValueType::Var(_, VarType::Numeric) => PostgresType::NUMERIC_ARRAY, + ValueType::Json(_) | ValueType::Var(_, VarType::Json) => PostgresType::JSONB_ARRAY, + ValueType::Xml(_) | ValueType::Var(_, VarType::Xml) => PostgresType::XML_ARRAY, + ValueType::Uuid(_) | ValueType::Var(_, VarType::Uuid) => PostgresType::UUID_ARRAY, + ValueType::DateTime(_) | ValueType::Var(_, VarType::DateTime) => { + PostgresType::TIMESTAMPTZ_ARRAY + } + ValueType::Date(_) | ValueType::Var(_, VarType::Date) => PostgresType::TIMESTAMP_ARRAY, + ValueType::Time(_) | ValueType::Var(_, VarType::Time) => PostgresType::TIME_ARRAY, // In the case of nested arrays, we let PG infer the type - ValueType::Array(_) => PostgresType::UNKNOWN, + ValueType::Array(_) | ValueType::Var(_, VarType::Array(_)) => 
PostgresType::UNKNOWN, + ValueType::Var(_, VarType::Unknown) => PostgresType::UNKNOWN, } } + + ValueType::Var(_, VarType::Array(t)) => match &**t { + VarType::Unknown => PostgresType::UNKNOWN, + VarType::Int32 => PostgresType::INT4_ARRAY, + VarType::Int64 => PostgresType::INT8_ARRAY, + VarType::Float => PostgresType::FLOAT4_ARRAY, + VarType::Double => PostgresType::FLOAT8_ARRAY, + VarType::Text => PostgresType::TEXT_ARRAY, + VarType::Enum => PostgresType::UNKNOWN, + VarType::Bytes => PostgresType::BYTEA_ARRAY, + VarType::Boolean => PostgresType::BOOL_ARRAY, + VarType::Char => PostgresType::CHAR_ARRAY, + VarType::Array(_) => PostgresType::UNKNOWN, + VarType::Numeric => PostgresType::NUMERIC_ARRAY, + VarType::Json => PostgresType::JSONB_ARRAY, + VarType::Xml => PostgresType::XML_ARRAY, + VarType::Uuid => PostgresType::UUID_ARRAY, + VarType::DateTime => PostgresType::TIMESTAMPTZ_ARRAY, + VarType::Date => PostgresType::TIMESTAMP_ARRAY, + VarType::Time => PostgresType::TIME_ARRAY, + }, } }) .collect() @@ -975,6 +1005,11 @@ impl ToSql for Value<'_> { Ok(result) }), (ValueType::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), + (ValueType::Var(name, _), _) => { + let error: Box = + Box::new(Error::builder(ErrorKind::RanQueryWithVarParam(name.clone().into_owned())).build()); + Some(Err(error)) + } }; match res { diff --git a/quaint/src/connector/sqlite/native/conversion.rs b/quaint/src/connector/sqlite/native/conversion.rs index e24379a58aca..dd6316d5b327 100644 --- a/quaint/src/connector/sqlite/native/conversion.rs +++ b/quaint/src/connector/sqlite/native/conversion.rs @@ -307,6 +307,10 @@ impl ToSql for Value<'_> { date.and_hms_opt(time.hour(), time.minute(), time.second()) }) .map(|dt| ToSqlOutput::from(dt.and_utc().timestamp_millis())), + + ValueType::Var(name, _) => Err(RusqlError::ToSqlConversionFailure(Box::new( + Error::builder(ErrorKind::RanQueryWithVarParam(name.clone().into_owned())).build(), + )))?, }; match value { diff --git 
a/quaint/src/error/mod.rs b/quaint/src/error/mod.rs index 661eb4d344ff..67f7e62650f8 100644 --- a/quaint/src/error/mod.rs +++ b/quaint/src/error/mod.rs @@ -241,6 +241,12 @@ pub enum ErrorKind { #[error("External error id#{}", _0)] ExternalError(i32), + + #[error("Variable '{0}' used as raw value in query. Variables must be used as parameters.")] + VarAsRawValue(String), + + #[error("Attempted to execute a query that contains unbound variable '{0}' in parameters.")] + RanQueryWithVarParam(String), } #[cfg(not(target_arch = "wasm32"))] diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index a3887b4cfaee..56d81232a1e3 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -402,6 +402,10 @@ impl<'a> Visitor<'a> for Mssql<'a> { // Style 3 is keep all whitespace + internal DTD processing: // https://docs.microsoft.com/en-us/sql/t-sql/functions/cast-and-convert-transact-sql?redirectedfrom=MSDN&view=sql-server-ver15#xml-styles ValueType::Xml(cow) => cow.map(|cow| self.write(format!("CONVERT(XML, N'{cow}', 3)"))), + + ValueType::Var(name, _) => Some(Err( + Error::builder(ErrorKind::VarAsRawValue(name.clone().into_owned())).build() + )), }; match res { diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index 63ed01612e0a..2d207ea4d70f 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -198,6 +198,10 @@ impl<'a> Visitor<'a> for Mysql<'a> { ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), + + ValueType::Var(name, _) => Some(Err( + Error::builder(ErrorKind::VarAsRawValue(name.clone().into_owned())).build() + )), }; match res { diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index d347402f73f2..b6db70b3da4d 100644 --- a/quaint/src/visitor/postgres.rs +++ 
b/quaint/src/visitor/postgres.rs @@ -1,5 +1,6 @@ use crate::{ ast::*, + error::{Error, ErrorKind}, visitor::{self, Visitor}, }; use itertools::Itertools; @@ -262,6 +263,10 @@ impl<'a> Visitor<'a> for Postgres<'a> { ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + + ValueType::Var(name, _) => Some(Err( + Error::builder(ErrorKind::VarAsRawValue(name.clone().into_owned())).build() + )), }; match res { diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 06d7c86f9876..aed9b96a2fa4 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -145,6 +145,10 @@ impl<'a> Visitor<'a> for Sqlite<'a> { ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), + + ValueType::Var(name, _) => Some(Err( + Error::builder(ErrorKind::VarAsRawValue(name.clone().into_owned())).build() + )), }; match res { diff --git a/query-engine/connectors/sql-query-connector/src/error.rs b/query-engine/connectors/sql-query-connector/src/error.rs index f3e76d84fd9e..970fb173804f 100644 --- a/query-engine/connectors/sql-query-connector/src/error.rs +++ b/query-engine/connectors/sql-query-connector/src/error.rs @@ -367,6 +367,8 @@ impl From for SqlError { e @ QuaintKind::DatabaseAlreadyExists { .. } => SqlError::ConnectionError(e), e @ QuaintKind::InvalidConnectionArguments => SqlError::ConnectionError(e), e @ QuaintKind::SocketTimeout => SqlError::ConnectionError(e), + e @ QuaintKind::VarAsRawValue { .. } => SqlError::ConversionError(e.into()), + e @ QuaintKind::RanQueryWithVarParam { .. 
} => SqlError::ConversionError(e.into()), } } } diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index 826bc2f1d7e1..3d051b667f8a 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -1,8 +1,8 @@ use crate::context::Context; use chrono::Utc; -use prisma_value::PrismaValue; +use prisma_value::{PlaceholderType, PrismaValue}; use quaint::{ - ast::{EnumName, Value, ValueType}, + ast::{EnumName, Value, ValueType, VarType}, prelude::{EnumVariant, TypeDataLength, TypeFamily}, }; use query_structure::{ScalarField, TypeIdentifier}; @@ -74,7 +74,21 @@ impl ScalarFieldExt for ScalarField { TypeIdentifier::Int => Value::null_int32(), TypeIdentifier::BigInt => Value::null_int64(), TypeIdentifier::Bytes => Value::null_bytes(), - TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), + TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach this path"), + }, + (PrismaValue::Placeholder { name, .. 
}, ident) => match ident { + TypeIdentifier::String => Value::var(name, VarType::Text), + TypeIdentifier::Int => Value::var(name, VarType::Int32), + TypeIdentifier::BigInt => Value::var(name, VarType::Int64), + TypeIdentifier::Float => Value::var(name, VarType::Numeric), + TypeIdentifier::Decimal => Value::var(name, VarType::Numeric), + TypeIdentifier::Boolean => Value::var(name, VarType::Boolean), + TypeIdentifier::Enum(_) => Value::var(name, VarType::Enum), + TypeIdentifier::UUID => Value::var(name, VarType::Uuid), + TypeIdentifier::Json => Value::var(name, VarType::Json), + TypeIdentifier::DateTime => Value::var(name, VarType::DateTime), + TypeIdentifier::Bytes => Value::var(name, VarType::Bytes), + TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach this path"), }, }; @@ -126,6 +140,23 @@ pub fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { PrismaValue::Bytes(b) => Value::bytes(b), PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. 
PrismaValue::Object(_) => unimplemented!(), + PrismaValue::Placeholder { name, r#type } => Value::var(name, convert_placeholder_type_to_var_type(&r#type)), + } +} + +fn convert_placeholder_type_to_var_type(pt: &PlaceholderType) -> VarType { + match pt { + PlaceholderType::Any => VarType::Unknown, + PlaceholderType::String => VarType::Text, + PlaceholderType::Int => VarType::Int32, + PlaceholderType::BigInt => VarType::Int64, + PlaceholderType::Float => VarType::Numeric, + PlaceholderType::Boolean => VarType::Boolean, + PlaceholderType::Decimal => VarType::Numeric, + PlaceholderType::Date => VarType::DateTime, + PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(&*t))), + PlaceholderType::Object => VarType::Json, + PlaceholderType::Bytes => VarType::Bytes, } } diff --git a/query-engine/connectors/sql-query-connector/src/ser_raw.rs b/query-engine/connectors/sql-query-connector/src/ser_raw.rs index 87e0e84ae8f9..1bcd059e4c31 100644 --- a/query-engine/connectors/sql-query-connector/src/ser_raw.rs +++ b/query-engine/connectors/sql-query-connector/src/ser_raw.rs @@ -187,6 +187,7 @@ impl Serialize for SerializedValue<'_> { ValueType::DateTime(value) => value.map(|value| value.to_rfc3339()).serialize(serializer), ValueType::Date(value) => value.serialize(serializer), ValueType::Time(value) => value.serialize(serializer), + ValueType::Var(_, _) => unreachable!(), } } } diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs index 2221925e8040..71bc7ec3dfb7 100644 --- a/query-engine/connectors/sql-query-connector/src/value.rs +++ b/query-engine/connectors/sql-query-connector/src/value.rs @@ -98,7 +98,35 @@ pub fn to_prisma_value<'a, T: Into>>(qv: T) -> crate::Result s .map(|s| PrismaValue::String(s.into_owned())) .unwrap_or(PrismaValue::Null), + + ValueType::Var(name, vt) => PrismaValue::Placeholder { + name: name.into_owned(), + r#type: 
var_type_to_prisma_type(&vt), + }, }; Ok(val) } + +fn var_type_to_prisma_type(vt: &quaint::ast::VarType) -> prisma_value::PlaceholderType { + match vt { + quaint::ast::VarType::Unknown => prisma_value::PlaceholderType::Any, + quaint::ast::VarType::Int32 => prisma_value::PlaceholderType::Int, + quaint::ast::VarType::Int64 => prisma_value::PlaceholderType::BigInt, + quaint::ast::VarType::Float => prisma_value::PlaceholderType::Float, + quaint::ast::VarType::Double => prisma_value::PlaceholderType::Float, + quaint::ast::VarType::Text => prisma_value::PlaceholderType::String, + quaint::ast::VarType::Enum => prisma_value::PlaceholderType::String, + quaint::ast::VarType::Bytes => prisma_value::PlaceholderType::Bytes, + quaint::ast::VarType::Boolean => prisma_value::PlaceholderType::Boolean, + quaint::ast::VarType::Char => prisma_value::PlaceholderType::String, + quaint::ast::VarType::Array(t) => prisma_value::PlaceholderType::Array(Box::new(var_type_to_prisma_type(&*t))), + quaint::ast::VarType::Numeric => prisma_value::PlaceholderType::Decimal, + quaint::ast::VarType::Json => prisma_value::PlaceholderType::Object, + quaint::ast::VarType::Xml => prisma_value::PlaceholderType::String, + quaint::ast::VarType::Uuid => prisma_value::PlaceholderType::String, + quaint::ast::VarType::DateTime => prisma_value::PlaceholderType::Date, + quaint::ast::VarType::Date => prisma_value::PlaceholderType::Date, + quaint::ast::VarType::Time => prisma_value::PlaceholderType::Date, + } +} diff --git a/query-engine/core/src/compiler/translate/query/convert.rs b/query-engine/core/src/compiler/translate/query/convert.rs index d4c678035fc0..393b1edaeb66 100644 --- a/query-engine/core/src/compiler/translate/query/convert.rs +++ b/query-engine/core/src/compiler/translate/query/convert.rs @@ -1,6 +1,7 @@ use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; -use query_structure::PrismaValue; +use quaint::ast::VarType; +use query_structure::{PlaceholderType, 
PrismaValue}; pub(crate) fn quaint_value_to_prisma_value(value: quaint::Value<'_>) -> PrismaValue { match value.typed { @@ -62,5 +63,32 @@ pub(crate) fn quaint_value_to_prisma_value(value: quaint::Value<'_>) -> PrismaVa PrismaValue::DateTime(dt.into()) } quaint::ValueType::Time(None) => PrismaValue::Null, + quaint::ValueType::Var(name, vt) => PrismaValue::Placeholder { + name: name.into_owned(), + r#type: var_type_to_placeholder_type(&vt), + }, + } +} + +fn var_type_to_placeholder_type(vt: &VarType) -> PlaceholderType { + match vt { + VarType::Unknown => PlaceholderType::Any, + VarType::Int32 => PlaceholderType::Int, + VarType::Int64 => PlaceholderType::BigInt, + VarType::Float => PlaceholderType::Float, + VarType::Double => PlaceholderType::Float, + VarType::Text => PlaceholderType::String, + VarType::Enum => PlaceholderType::String, + VarType::Bytes => PlaceholderType::Bytes, + VarType::Boolean => PlaceholderType::Boolean, + VarType::Char => PlaceholderType::String, + VarType::Array(t) => PlaceholderType::Array(Box::new(var_type_to_placeholder_type(&*t))), + VarType::Numeric => PlaceholderType::Float, + VarType::Json => PlaceholderType::Object, + VarType::Xml => PlaceholderType::String, + VarType::Uuid => PlaceholderType::String, + VarType::DateTime => PlaceholderType::Date, + VarType::Date => PlaceholderType::Date, + VarType::Time => PlaceholderType::Date, } } diff --git a/query-engine/core/src/constants.rs b/query-engine/core/src/constants.rs index f6a9eb403646..2ec2a7680060 100644 --- a/query-engine/core/src/constants.rs +++ b/query-engine/core/src/constants.rs @@ -12,6 +12,7 @@ pub mod custom_types { pub const ENUM: &str = "Enum"; pub const FIELD_REF: &str = "FieldRef"; pub const RAW: &str = "Raw"; + pub const PARAM: &str = "Param"; pub fn make_object(typ: &str, value: PrismaValue) -> PrismaValue { PrismaValue::Object(vec![make_type_pair(typ), make_value_pair(value)]) diff --git a/query-engine/core/src/query_document/parser.rs 
b/query-engine/core/src/query_document/parser.rs index 0f512612144d..6ef92807cbf6 100644 --- a/query-engine/core/src/query_document/parser.rs +++ b/query-engine/core/src/query_document/parser.rs @@ -232,6 +232,10 @@ impl QueryDocumentParser { possible_input_types: &[InputType<'a>], query_schema: &'a QuerySchema, ) -> QueryParserResult> { + if let ArgumentValue::Scalar(pv @ PrismaValue::Placeholder { .. }) = &value { + return Ok(ParsedInputValue::Single(pv.clone())); + } + let mut failures = Vec::new(); macro_rules! try_this { @@ -908,6 +912,7 @@ pub(crate) mod conversions { PrismaValue::Float(_) => "Float".to_string(), PrismaValue::BigInt(_) => "BigInt".to_string(), PrismaValue::Bytes(_) => "Bytes".to_string(), + PrismaValue::Placeholder { r#type, .. } => r#type.to_string(), } } diff --git a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs index 356e03038cfb..7e89a295eec8 100644 --- a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs +++ b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs @@ -338,6 +338,7 @@ fn prisma_value_to_serde(value: &PrismaValue) -> serde_json::Value { serde_json::Value::Object(map) } + PrismaValue::Placeholder { .. 
} => unreachable!(), } } diff --git a/query-engine/driver-adapters/src/conversion/js_arg_type.rs b/query-engine/driver-adapters/src/conversion/js_arg_type.rs index e1ea7c1c5754..63ffafd3b66b 100644 --- a/query-engine/driver-adapters/src/conversion/js_arg_type.rs +++ b/query-engine/driver-adapters/src/conversion/js_arg_type.rs @@ -89,5 +89,6 @@ pub fn value_to_js_arg_type(value: &quaint::Value) -> JSArgType { quaint::ValueType::DateTime(_) => JSArgType::DateTime, quaint::ValueType::Date(_) => JSArgType::Date, quaint::ValueType::Time(_) => JSArgType::Time, + quaint::ValueType::Var(_, _) => unreachable!(), } } diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 2de007a8c77e..0c36b89877f8 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -19,7 +19,19 @@ pub fn main() -> anyhow::Result<()> { model_name: Some("User".into()), action: Action::new(QueryTag::FindMany), query: FieldQuery { - arguments: None, + arguments: Some({ + let mut map = IndexMap::new(); + map.insert( + "where".into(), + serde_json::json!({ + "email": { + "$type": "Param", + "value": "userEmail", + } + }), + ); + map + }), selection: SelectionSet::new({ let mut map = IndexMap::new(); map.insert("$scalars".into(), SelectionSetValue::Shorthand(true)); diff --git a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs index 650f4e1a8bb9..e30077ecce44 100644 --- a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs +++ b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs @@ -6,7 +6,7 @@ use query_core::{ schema::{ObjectType, OutputField, QuerySchema}, ArgumentValue, Operation, Selection, }; -use query_structure::{decode_bytes, parse_datetime, prelude::ParentContainer, Field}; +use query_structure::{decode_bytes, parse_datetime, prelude::ParentContainer, Field, 
PlaceholderType, PrismaValue}; use serde_json::Value as JsonValue; use std::str::FromStr; @@ -243,6 +243,20 @@ impl<'a> JsonProtocolAdapter<'a> { Ok(ArgumentValue::FieldRef(values)) } + Some(custom_types::PARAM) => { + let name = obj + .get(custom_types::VALUE) + .and_then(|v| v.as_str()) + .ok_or_else(build_err)? + .to_owned(); + + let placeholder = PrismaValue::Placeholder { + name, + r#type: PlaceholderType::Any, + }; + + Ok(ArgumentValue::Scalar(placeholder)) + } _ => { let values = obj .into_iter() @@ -421,12 +435,12 @@ mod tests { generator client { provider = "prisma-client-js" } - + datasource db { provider = "mongodb" url = "mongodb://" } - + model User { id String @id @map("_id") name String? @@ -441,7 +455,7 @@ mod tests { model Post { id String @id @map("_id") title String - userId String + userId String user User @relation(fields: [userId], references: [id]) } @@ -1391,28 +1405,28 @@ mod tests { generator client { provider = "prisma-client-js" } - + datasource db { provider = "mongodb" url = "mongodb://" } - + model Comment { id String @id @default(auto()) @map("_id") @db.ObjectId - + country String? content CommentContent } - + type CommentContent { text String upvotes CommentContentUpvotes[] } - + type CommentContentUpvotes { vote Boolean userId String - } + } "#; let mut schema = psl::validate(schema_str.into()); @@ -1532,21 +1546,21 @@ mod tests { generator client { provider = "prisma-client-js" } - + datasource db { provider = "mongodb" url = "mongodb://" } - + model List { id String @id @default(auto()) @map("_id") @db.ObjectId head ListNode? } - + type ListNode { value Int - next ListNode? - } + next ListNode? 
+ } "#; let mut schema = psl::validate(schema_str.into()); @@ -1586,24 +1600,24 @@ mod tests { generator client { provider = "prisma-client-js" } - + datasource db { provider = "mongodb" url = "mongodb://" } - + model User { id String @id @default(auto()) @map("_id") @db.ObjectId - + billingAddress Address shippingAddress Address } - + type Address { number Int street String zipCode Int - } + } "#; let mut schema = psl::validate(schema_str.into()); @@ -1675,28 +1689,28 @@ mod tests { generator client { provider = "prisma-client-js" } - + datasource db { provider = "mongodb" url = "mongodb://" } - + model User { id String @id @default(auto()) @map("_id") @db.ObjectId billingAddress Address shippingAddress Address } - + type Address { streetAddress StreetAddress zipCode String city String } - + type StreetAddress { streetName String houseNumber String - } + } "#; let mut schema = psl::validate(schema_str.into()); From 94bd64154c17bc3bce658506a820a99bf5e42e8e Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 03:26:03 +0200 Subject: [PATCH 10/65] [integration] From 440e3e21a8b84d6019a48df08de2a98d1078b514 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 03:27:55 +0200 Subject: [PATCH 11/65] [integration] --- libs/prisma-value/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index 3551a45cdcdb..3d8a1c6c4b2a 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -81,7 +81,7 @@ impl std::fmt::Display for PlaceholderType { PlaceholderType::Boolean => write!(f, "Boolean"), PlaceholderType::Decimal => write!(f, "Decimal"), PlaceholderType::Date => write!(f, "Date"), - PlaceholderType::Array(t) => write!(f, "Array<{}>", t), + PlaceholderType::Array(t) => write!(f, "Array<{t}>"), PlaceholderType::Object => write!(f, "Object"), PlaceholderType::Bytes => write!(f, "Bytes"), } From f4d21eaae853c515a57a521fe284a43a412aafe2 Mon Sep 
17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 15:49:56 +0200 Subject: [PATCH 12/65] Expand comment --- query-engine/core/src/compiler/expression.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index a1fda58c7b5f..a29122945b5b 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -32,7 +32,7 @@ impl DbQuery { #[derive(Debug)] pub enum Expression { - /// Sequence of statements. + /// Sequence of statements. The whole sequence evaluates to the result of the last expression. Seq(Vec), /// Get binding value. From f2615630b78fe1ee16e10adfd80e54da300dde04 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 17:16:54 +0200 Subject: [PATCH 13/65] Implement napi method --- query-engine/core/src/compiler/expression.rs | 8 +++-- query-engine/core/src/compiler/mod.rs | 27 ++++++++++++++++- query-engine/core/src/error.rs | 5 +++- .../query-engine-node-api/src/engine.rs | 29 +++++++++++++++++++ 4 files changed, 64 insertions(+), 5 deletions(-) diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index a29122945b5b..26e6e066be55 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -1,6 +1,7 @@ use query_structure::PrismaValue; +use serde::Serialize; -#[derive(Debug)] +#[derive(Debug, Serialize)] pub struct Binding { pub name: String, pub expr: Expression, @@ -18,7 +19,7 @@ impl std::fmt::Display for Binding { } } -#[derive(Debug)] +#[derive(Debug, Serialize)] pub struct DbQuery { pub query: String, pub params: Vec, @@ -30,7 +31,8 @@ impl DbQuery { } } -#[derive(Debug)] +#[derive(Debug, Serialize)] +#[serde(tag = "type", content = "args")] pub enum Expression { /// Sequence of statements. The whole sequence evaluates to the result of the last expression. 
Seq(Vec), diff --git a/query-engine/core/src/compiler/mod.rs b/query-engine/core/src/compiler/mod.rs index f3f5aba6c017..26170861f259 100644 --- a/query-engine/core/src/compiler/mod.rs +++ b/query-engine/core/src/compiler/mod.rs @@ -1,4 +1,29 @@ pub mod expression; pub mod translate; -pub use translate::translate; +use std::sync::Arc; + +pub use expression::Expression; +use schema::QuerySchema; +use thiserror::Error; +pub use translate::{translate, TranslateError}; + +use crate::{QueryDocument, QueryGraphBuilder}; + +#[derive(Debug, Error)] +pub enum CompileError { + #[error("only a single query can be compiled at a time")] + UnsupportedRequest, + + #[error("{0}")] + TranslateError(#[from] TranslateError), +} + +pub fn compile(query_schema: &Arc, query_doc: QueryDocument) -> crate::Result { + let QueryDocument::Single(query) = query_doc else { + return Err(CompileError::UnsupportedRequest.into()); + }; + + let (graph, _serializer) = QueryGraphBuilder::new(query_schema).build(query)?; + Ok(translate(graph).map_err(CompileError::from)?) 
+} diff --git a/query-engine/core/src/error.rs b/query-engine/core/src/error.rs index b067a325a4a5..e779fc311b3a 100644 --- a/query-engine/core/src/error.rs +++ b/query-engine/core/src/error.rs @@ -1,4 +1,4 @@ -use crate::{InterpreterError, QueryGraphBuilderError, RelationViolation, TransactionError}; +use crate::{compiler::CompileError, InterpreterError, QueryGraphBuilderError, RelationViolation, TransactionError}; use connector::error::ConnectorError; use query_structure::DomainError; use thiserror::Error; @@ -67,6 +67,9 @@ pub enum CoreError { #[error("Query timed out")] QueryTimeout, + + #[error("Error compiling a query: {0}")] + CompileError(#[from] CompileError), } impl CoreError { diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 9e6bf180171f..1d17eb56ff87 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -354,6 +354,35 @@ impl QueryEngine { .await } + #[napi] + pub async fn compile(&self, request: String, human_readable: bool) -> napi::Result { + let dispatcher = self.logger.dispatcher(); + let recorder = self.logger.recorder(); + + async_panic_to_js_error(async { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + let request = RequestBody::try_from_str(&request, engine.engine_protocol())?; + let query_doc = request + .into_doc(engine.query_schema()) + .map_err(|err| napi::Error::from_reason(err.to_string()))?; + + let plan = query_core::compiler::compile(engine.query_schema(), query_doc).map_err(ApiError::from)?; + + let response = if human_readable { + plan.to_string() + } else { + serde_json::to_string(&plan)? + }; + + Ok(response) + }) + .with_subscriber(dispatcher) + .with_optional_recorder(recorder) + .await + } + /// If connected, attempts to start a transaction in the core and returns its ID. 
#[napi] pub async fn start_transaction(&self, input: String, trace: String, request_id: String) -> napi::Result { From 0c1f1f4ce55385d3e7c407b402f955483652fce1 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 17:17:12 +0200 Subject: [PATCH 14/65] [integration] From 06c3968f89bc22dca594a2e5475379f1f83347ed Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 18:20:22 +0200 Subject: [PATCH 15/65] Remove useless deref --- .../sql-query-connector/src/model_extensions/scalar_field.rs | 2 +- query-engine/connectors/sql-query-connector/src/value.rs | 2 +- query-engine/core/src/compiler/translate/query/convert.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index 3d051b667f8a..a3e88aa1d403 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -154,7 +154,7 @@ fn convert_placeholder_type_to_var_type(pt: &PlaceholderType) -> VarType { PlaceholderType::Boolean => VarType::Boolean, PlaceholderType::Decimal => VarType::Numeric, PlaceholderType::Date => VarType::DateTime, - PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(&*t))), + PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(t))), PlaceholderType::Object => VarType::Json, PlaceholderType::Bytes => VarType::Bytes, } diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs index 71bc7ec3dfb7..f83746c2beb2 100644 --- a/query-engine/connectors/sql-query-connector/src/value.rs +++ b/query-engine/connectors/sql-query-connector/src/value.rs @@ -120,7 +120,7 @@ fn var_type_to_prisma_type(vt: &quaint::ast::VarType) -> 
prisma_value::Placehold quaint::ast::VarType::Bytes => prisma_value::PlaceholderType::Bytes, quaint::ast::VarType::Boolean => prisma_value::PlaceholderType::Boolean, quaint::ast::VarType::Char => prisma_value::PlaceholderType::String, - quaint::ast::VarType::Array(t) => prisma_value::PlaceholderType::Array(Box::new(var_type_to_prisma_type(&*t))), + quaint::ast::VarType::Array(t) => prisma_value::PlaceholderType::Array(Box::new(var_type_to_prisma_type(t))), quaint::ast::VarType::Numeric => prisma_value::PlaceholderType::Decimal, quaint::ast::VarType::Json => prisma_value::PlaceholderType::Object, quaint::ast::VarType::Xml => prisma_value::PlaceholderType::String, diff --git a/query-engine/core/src/compiler/translate/query/convert.rs b/query-engine/core/src/compiler/translate/query/convert.rs index 393b1edaeb66..2ea8463f93c0 100644 --- a/query-engine/core/src/compiler/translate/query/convert.rs +++ b/query-engine/core/src/compiler/translate/query/convert.rs @@ -82,7 +82,7 @@ fn var_type_to_placeholder_type(vt: &VarType) -> PlaceholderType { VarType::Bytes => PlaceholderType::Bytes, VarType::Boolean => PlaceholderType::Boolean, VarType::Char => PlaceholderType::String, - VarType::Array(t) => PlaceholderType::Array(Box::new(var_type_to_placeholder_type(&*t))), + VarType::Array(t) => PlaceholderType::Array(Box::new(var_type_to_placeholder_type(t))), VarType::Numeric => PlaceholderType::Float, VarType::Json => PlaceholderType::Object, VarType::Xml => PlaceholderType::String, From 362f5b2163da6a1b6b29ba5a812fb9481dbd7359 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 18:20:42 +0200 Subject: [PATCH 16/65] [integration] From fdcc71880ac4412c2e9ede5954a0ef38cb3613a0 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 22:04:04 +0200 Subject: [PATCH 17/65] Improve placeholder serialization --- libs/prisma-value/src/lib.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/libs/prisma-value/src/lib.rs 
b/libs/prisma-value/src/lib.rs index 3d8a1c6c4b2a..b5c3f9cb3ed4 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -8,6 +8,7 @@ use chrono::prelude::*; use serde::de::Unexpected; use serde::ser::SerializeMap; use serde::{ser::Serializer, Deserialize, Deserializer, Serialize}; +use serde_json::json; use std::{convert::TryFrom, fmt, str::FromStr}; use uuid::Uuid; @@ -256,11 +257,16 @@ fn serialize_placeholder(name: &str, r#type: &PlaceholderType, serializer: S) where S: Serializer, { - let mut map = serializer.serialize_map(Some(3))?; + let mut map = serializer.serialize_map(Some(2))?; map.serialize_entry("prisma__type", "param")?; - map.serialize_entry("prisma__value", name)?; - map.serialize_entry("prisma__paramType", r#type)?; + map.serialize_entry( + "prisma__value", + &json!({ + "name": name, + "type": r#type.to_string(), + }), + )?; map.end() } From 4b729e62c00917d97969ab628e728835eae6d73a Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 22:04:26 +0200 Subject: [PATCH 18/65] [integration] From 20bb45443fcd8a79b54638a801283fb42406daaf Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 26 Oct 2024 22:47:39 +0200 Subject: [PATCH 19/65] Fix warnings --- .../sql-query-connector/src/model_extensions/mod.rs | 3 ++- query-engine/core/src/compiler/translate.rs | 3 +++ query-engine/core/src/compiler/translate/query/read.rs | 4 ++-- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs index 2c6096ffb481..d1bff1954100 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs @@ -5,4 +5,5 @@ mod scalar_field; mod selection_result; mod table; -pub use self::{column::*, record::*, relation::*, scalar_field::*, selection_result::*, table::*}; +pub use 
self::{column::*, record::*, scalar_field::*}; +pub(crate) use self::{relation::*, selection_result::*, table::*}; diff --git a/query-engine/core/src/compiler/translate.rs b/query-engine/core/src/compiler/translate.rs index f0346120ae4d..650d03e936fb 100644 --- a/query-engine/core/src/compiler/translate.rs +++ b/query-engine/core/src/compiler/translate.rs @@ -30,6 +30,7 @@ pub fn translate(mut graph: QueryGraph) -> TranslateResult { struct NodeTranslator<'a, 'b> { graph: &'a mut QueryGraph, node: NodeRef, + #[allow(dead_code)] parent_edges: &'b [EdgeRef], } @@ -66,6 +67,7 @@ impl<'a, 'b> NodeTranslator<'a, 'b> { translate_query(query) } + #[allow(dead_code)] fn process_children(&mut self) -> TranslateResult> { let mut child_pairs = self.graph.direct_child_pairs(&self.node); @@ -110,6 +112,7 @@ impl<'a, 'b> NodeTranslator<'a, 'b> { Ok(expressions) } + #[allow(dead_code)] fn fold_result_scopes(&mut self, result_subgraphs: Vec<(EdgeRef, NodeRef)>) -> TranslateResult { // if the subgraphs all point to the same result node, we fold them in sequence // if not, we can separate them with a getfirstnonempty diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index cabf9421e250..076d4379566a 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -65,10 +65,10 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans }) } -fn build_read_m2m_query(query: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { +fn build_read_m2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> TranslateResult { todo!() } -fn build_read_one2m_query(query: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { +fn build_read_one2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> TranslateResult { todo!() } From b0d1b7f0175c6e278d45f1f60cd33b5496b6df92 Mon Sep 17 00:00:00 2001 From: Alexey 
Orlenko Date: Sun, 27 Oct 2024 12:26:07 +0100 Subject: [PATCH 20/65] Add compile method on wasm --- .../query-engine-wasm/src/wasm/engine.rs | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/query-engine/query-engine-wasm/src/wasm/engine.rs b/query-engine/query-engine-wasm/src/wasm/engine.rs index 5adc474e5cfb..2f76eb2375e6 100644 --- a/query-engine/query-engine-wasm/src/wasm/engine.rs +++ b/query-engine/query-engine-wasm/src/wasm/engine.rs @@ -359,4 +359,28 @@ impl QueryEngine { .with_subscriber(dispatcher) .await } + + #[wasm_bindgen] + pub async fn compile( + &self, + request: String, + _human_readable: bool, // ignored on wasm to not compile it in + ) -> Result { + let dispatcher = self.logger.dispatcher(); + + async { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + let request = RequestBody::try_from_str(&request, engine.engine_protocol())?; + let query_doc = request + .into_doc(engine.query_schema()) + .map_err(|err| napi::Error::from_reason(err.to_string()))?; + + let plan = query_core::compiler::compile(engine.query_schema(), query_doc).map_err(ApiError::from)?; + Ok(serde_json::to_string(&plan)?) 
+ } + .with_subscriber(dispatcher) + .await + } } From 3bb68da812cbb9343616be758760257541e799b0 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sun, 27 Oct 2024 13:10:55 +0100 Subject: [PATCH 21/65] Make PrismaValue param parsing consistent with serialization --- libs/prisma-value/src/lib.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index b5c3f9cb3ed4..01a4e5e50572 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -180,15 +180,20 @@ impl TryFrom for PrismaValue { } Some("param") => { - let name = obj + let obj = obj .get("prisma__value") + .and_then(|v| v.as_object()) + .ok_or_else(|| ConversionFailure::new("JSON param value", "PrismaValue"))?; + + let name = obj + .get("name") .and_then(|v| v.as_str()) - .ok_or_else(|| ConversionFailure::new("JSON param value", "PrismaValue"))? + .ok_or_else(|| ConversionFailure::new("param name", "JSON param value"))? .to_owned(); Ok(PrismaValue::Placeholder { name, - r#type: PlaceholderType::Any, + r#type: PlaceholderType::Any, // parsing the type is not implemented yet }) } From d8ebea21a57d55421c14af529ff26787840d3fc7 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 10:50:26 +0100 Subject: [PATCH 22/65] [integration] From e27a869c4bad1cf5e89763859861f510b88d0d03 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 10:56:10 +0100 Subject: [PATCH 23/65] Switch dialect to sqlite --- query-engine/core/src/compiler/translate/query.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index f3ff82c95298..fe6eae207e3c 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -18,11 +18,8 @@ use crate::{ use super::TranslateResult; pub(crate) fn translate_query(query: Query) -> 
TranslateResult { - let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( - SqlFamily::Postgres, - "public".to_owned(), - None, - )); + let connection_info = + ConnectionInfo::External(ExternalConnectionInfo::new(SqlFamily::Sqlite, "main".to_owned(), None)); let ctx = Context::new(&connection_info, None); From af8159deeb0aa89851a02549309a975edd692084 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 10:58:38 +0100 Subject: [PATCH 24/65] [integration] From ce0e4f63e1034043688a61db5554203b70d43f47 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 11:11:10 +0100 Subject: [PATCH 25/65] [integration] From b820b6af8e45e157c3985be56a5d792b2b8f98c0 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 12:31:52 +0100 Subject: [PATCH 26/65] Comment out react native --- .github/workflows/build-engines.yml | 124 ++++++++++++++-------------- 1 file changed, 62 insertions(+), 62 deletions(-) diff --git a/.github/workflows/build-engines.yml b/.github/workflows/build-engines.yml index db1c1a42ce6d..ac7a72024b52 100644 --- a/.github/workflows/build-engines.yml +++ b/.github/workflows/build-engines.yml @@ -6,30 +6,30 @@ on: push: branches: - main - - '*.*.x' - - 'integration/*' + - "*.*.x" + - "integration/*" paths-ignore: - - '!.github/workflows/build-engines*' - - '.github/**' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' + - "!.github/workflows/build-engines*" + - ".github/**" + - ".buildkite/**" + - "*.md" + - "LICENSE" + - "CODEOWNERS" + - "renovate.json" workflow_dispatch: pull_request: paths-ignore: - - '!.github/workflows/build-engines*' - - '.github/**' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' + - "!.github/workflows/build-engines*" + - ".github/**" + - ".buildkite/**" + - "*.md" + - "LICENSE" + - "CODEOWNERS" + - "renovate.json" jobs: is-release-necessary: - name: 'Decide if a release of the engines artifacts is necessary' 
+ name: "Decide if a release of the engines artifacts is necessary" runs-on: ubuntu-22.04 outputs: release: ${{ steps.decision.outputs.release }} @@ -59,7 +59,7 @@ jobs: END_OF_COMMIT_MESSAGE echo "Commit message contains [integration]: ${{ contains(steps.commit-msg.outputs.commit-msg, '[integration]') }}" - - name: 'Check if commit message conatains `[integration]` and the PR author has permissions to trigger the workflow' + - name: "Check if commit message contains `[integration]` and the PR author has permissions to trigger the workflow" id: check-commit-message # See https://docs.github.com/en/graphql/reference/enums # https://michaelheap.com/github-actions-check-permission/ @@ -68,7 +68,7 @@ jobs: # - the PR author has permissions to trigger the workflow (must be part of the org or a collaborator) if: | github.event_name == 'pull_request' && - contains(steps.commit-msg.outputs.commit-msg, '[integration]') && + contains(steps.commit-msg.outputs.commit-msg, '[integration]') && ( github.event.pull_request.author_association == 'OWNER' || github.event.pull_request.author_association == 'MEMBER' || @@ -76,8 +76,8 @@ jobs: github.event.pull_request.author_association == 'COLLABORATOR' ) run: | - echo "Commit message contains [integration] and PR author has permissions" - # set value to GitHub output + echo "Commit message contains [integration] and PR author has permissions" + # set value to GitHub output echo "release=true" >> $GITHUB_OUTPUT # @@ -118,9 +118,9 @@ jobs: # https://github.com/peter-evans/find-comment/tree/v3/?tab=readme-ov-file#outputs # Tip: Empty strings evaluate to zero in GitHub Actions expressions. e.g. If comment-id is an empty string steps.fc.outputs.comment-id == 0 evaluates to true. 
if: | - github.event_name == 'workflow_dispatch' || - github.event_name == 'push' || - steps.check-commit-message.outputs.release == 'true' || + github.event_name == 'workflow_dispatch' || + github.event_name == 'push' || + steps.check-commit-message.outputs.release == 'true' || steps.check-branch.outputs.release == 'true' id: decision @@ -140,7 +140,7 @@ jobs: build-linux: name: Build Engines for Linux - needs: + needs: - is-release-necessary if: ${{ needs.is-release-necessary.outputs.release == 'true' }} uses: ./.github/workflows/build-engines-linux-template.yml @@ -149,7 +149,7 @@ jobs: build-macos-intel: name: Build Engines for Apple Intel - needs: + needs: - is-release-necessary if: ${{ needs.is-release-necessary.outputs.release == 'true' }} uses: ./.github/workflows/build-engines-apple-intel-template.yml @@ -158,25 +158,25 @@ jobs: build-macos-silicon: name: Build Engines for Apple Silicon - needs: + needs: - is-release-necessary if: ${{ needs.is-release-necessary.outputs.release == 'true' }} uses: ./.github/workflows/build-engines-apple-silicon-template.yml with: commit: ${{ github.sha }} - build-react-native: - name: Build Engines for React native - needs: - - is-release-necessary - if: ${{ needs.is-release-necessary.outputs.release == 'true' }} - uses: ./.github/workflows/build-engines-react-native-template.yml - with: - commit: ${{ github.sha }} + # build-react-native: + # name: Build Engines for React native + # needs: + # - is-release-necessary + # if: ${{ needs.is-release-necessary.outputs.release == 'true' }} + # uses: ./.github/workflows/build-engines-react-native-template.yml + # with: + # commit: ${{ github.sha }} build-windows: name: Build Engines for Windows - needs: + needs: - is-release-necessary if: ${{ needs.is-release-necessary.outputs.release == 'true' }} uses: ./.github/workflows/build-engines-windows-template.yml @@ -184,7 +184,7 @@ jobs: commit: ${{ github.sha }} release-artifacts: - name: 'Release artifacts from branch ${{ 
github.head_ref || github.ref_name }} for commit ${{ github.sha }}' + name: "Release artifacts from branch ${{ github.head_ref || github.ref_name }} for commit ${{ github.sha }}" runs-on: ubuntu-22.04 concurrency: group: ${{ github.sha }} @@ -192,12 +192,12 @@ jobs: - build-linux - build-macos-intel - build-macos-silicon - - build-react-native + # - build-react-native - build-windows env: - BUCKET_NAME: 'prisma-builds' + BUCKET_NAME: "prisma-builds" PRISMA_ENGINES_COMMIT_SHA: ${{ github.sha }} - DESTINATION_TARGET_PATH: 's3://prisma-builds/all_commits/${{ github.sha }}' + DESTINATION_TARGET_PATH: "s3://prisma-builds/all_commits/${{ github.sha }}" steps: # Because we need the scripts @@ -215,22 +215,22 @@ jobs: # run-id: 9526334324 # github-token: ${{ secrets.GITHUB_TOKEN }} - - name: 'R2: Check if artifacts were already built and uploaded before via `.finished` file' + - name: "R2: Check if artifacts were already built and uploaded before via `.finished` file" env: - FILE_PATH: 'all_commits/${{ github.sha }}/.finished' - FILE_PATH_LEGACY: 'all_commits/${{ github.sha }}/rhel-openssl-1.1.x/.finished' - AWS_DEFAULT_REGION: 'auto' + FILE_PATH: "all_commits/${{ github.sha }}/.finished" + FILE_PATH_LEGACY: "all_commits/${{ github.sha }}/rhel-openssl-1.1.x/.finished" + AWS_DEFAULT_REGION: "auto" AWS_ACCESS_KEY_ID: ${{ vars.R2_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} AWS_ENDPOINT_URL_S3: ${{ vars.R2_ENDPOINT }} working-directory: .github/workflows/utils run: bash checkFinishedMarker.sh - - name: 'S3: Check if artifacts were already built and uploaded before via `.finished` file' + - name: "S3: Check if artifacts were already built and uploaded before via `.finished` file" env: - FILE_PATH: 'all_commits/${{ github.sha }}/.finished' - FILE_PATH_LEGACY: 'all_commits/${{ github.sha }}/rhel-openssl-1.1.x/.finished' - AWS_DEFAULT_REGION: 'eu-west-1' + FILE_PATH: "all_commits/${{ github.sha }}/.finished" + FILE_PATH_LEGACY: "all_commits/${{ 
github.sha }}/rhel-openssl-1.1.x/.finished" + AWS_DEFAULT_REGION: "eu-west-1" AWS_ACCESS_KEY_ID: ${{ vars.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} working-directory: .github/workflows/utils @@ -247,14 +247,14 @@ jobs: cp -r rhel-openssl-1.1.x debian-openssl-1.1.x cp -r rhel-openssl-3.0.x debian-openssl-3.0.x - - name: Create .zip for react-native - working-directory: engines-artifacts - run: | - mkdir react-native - zip -r react-native/binaries.zip ios android - rm -rf ios android + # - name: Create .zip for react-native + # working-directory: engines-artifacts + # run: | + # mkdir react-native + # zip -r react-native/binaries.zip ios android + # rm -rf ios android - - name: 'Create compressed engine files (.gz)' + - name: "Create compressed engine files (.gz)" working-directory: engines-artifacts run: | set -eu @@ -266,13 +266,13 @@ jobs: ls -Rl . - - name: 'Create SHA256 checksum files (.sha256).' + - name: "Create SHA256 checksum files (.sha256)." working-directory: engines-artifacts run: | set -eu find . -type f | while read filename; do - sha256sum "$filename" > "$filename.sha256" + sha256sum "$filename" > "$filename.sha256" echo "$filename.sha256 file created." done @@ -292,7 +292,7 @@ jobs: run: gpg -K # next to each file (excluding .sha256 files) - - name: 'Create a GPG detached signature (.sig)' + - name: "Create a GPG detached signature (.sig)" working-directory: engines-artifacts run: | set -eu @@ -303,18 +303,18 @@ jobs: ls -Rl . 
- - name: 'Cloudflare R2: Upload to bucket and verify uploaded files then create `.finished` file' + - name: "Cloudflare R2: Upload to bucket and verify uploaded files then create `.finished` file" # https://docs.aws.amazon.com/cli/v1/userguide/cli-configure-envvars.html env: - AWS_DEFAULT_REGION: 'auto' + AWS_DEFAULT_REGION: "auto" AWS_ACCESS_KEY_ID: ${{ vars.R2_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} AWS_ENDPOINT_URL_S3: ${{ vars.R2_ENDPOINT }} run: bash .github/workflows/utils/uploadAndVerify.sh engines-artifacts-for-r2 - - name: 'AWS S3: Upload to bucket and verify uploaded files then create `.finished` file' + - name: "AWS S3: Upload to bucket and verify uploaded files then create `.finished` file" env: - AWS_DEFAULT_REGION: 'eu-west-1' + AWS_DEFAULT_REGION: "eu-west-1" AWS_ACCESS_KEY_ID: ${{ vars.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} run: bash .github/workflows/utils/uploadAndVerify.sh engines-artifacts-for-s3 From 8f2d9e3386567d737a6417ea42ea766047d26f13 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 12:33:53 +0100 Subject: [PATCH 27/65] [integration] From e995e5df84b44959f292b33e2a5ac9c87b80632d Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 12:34:40 +0100 Subject: [PATCH 28/65] wip add param to dmmf --- .../ast_builders/schema_ast_builder/type_renderer.rs | 1 + .../graphql/schema_renderer/type_renderer.rs | 2 ++ query-engine/schema/src/input_types.rs | 11 ++++++++++- query-engine/schema/src/query_schema.rs | 2 ++ 4 files changed, 15 insertions(+), 1 deletion(-) diff --git a/query-engine/dmmf/src/ast_builders/schema_ast_builder/type_renderer.rs b/query-engine/dmmf/src/ast_builders/schema_ast_builder/type_renderer.rs index dd4f26660440..c88e83438a50 100644 --- a/query-engine/dmmf/src/ast_builders/schema_ast_builder/type_renderer.rs +++ b/query-engine/dmmf/src/ast_builders/schema_ast_builder/type_renderer.rs @@ -49,6 +49,7 @@ pub(super) 
fn render_output_type<'a>(output_type: &OutputType<'a>, ctx: &mut Ren ScalarType::UUID => "UUID", ScalarType::JsonList => "Json", ScalarType::Bytes => "Bytes", + ScalarType::Param => unreachable!("output type must not be Param"), }; DmmfTypeReference { diff --git a/query-engine/request-handlers/src/protocols/graphql/schema_renderer/type_renderer.rs b/query-engine/request-handlers/src/protocols/graphql/schema_renderer/type_renderer.rs index 82a70e53dc00..b43449d34cc8 100644 --- a/query-engine/request-handlers/src/protocols/graphql/schema_renderer/type_renderer.rs +++ b/query-engine/request-handlers/src/protocols/graphql/schema_renderer/type_renderer.rs @@ -47,6 +47,7 @@ impl<'a> GqlTypeRenderer<'a> { ScalarType::UUID => "UUID", ScalarType::JsonList => "Json", ScalarType::Bytes => "Bytes", + ScalarType::Param => "Param", ScalarType::Null => unreachable!("Null types should not be picked for GQL rendering."), }; @@ -86,6 +87,7 @@ impl<'a> GqlTypeRenderer<'a> { ScalarType::JsonList => "Json", ScalarType::Bytes => "Bytes", ScalarType::Null => unreachable!("Null types should not be picked for GQL rendering."), + ScalarType::Param => unreachable!("output type must not be Param"), }; stringified.to_string() diff --git a/query-engine/schema/src/input_types.rs b/query-engine/schema/src/input_types.rs index 4ce09bd97bd2..3b47b2a37789 100644 --- a/query-engine/schema/src/input_types.rs +++ b/query-engine/schema/src/input_types.rs @@ -122,10 +122,15 @@ pub struct InputField<'a> { impl<'a> InputField<'a> { pub(crate) fn new( name: Cow<'a, str>, - field_types: Vec>, + mut field_types: Vec>, default_value: Option, is_required: bool, ) -> InputField<'a> { + // todo + #[allow(clippy::overly_complex_bool_expr)] + if false && field_types.iter().any(|t| t.is_scalar()) { + field_types.push(InputType::Scalar(ScalarType::Param)); + } InputField { name, default_value, @@ -279,6 +284,10 @@ impl<'a> InputType<'a> { InputType::Enum(containing) } + pub fn is_scalar(&self) -> bool { + 
matches!(self, Self::Scalar(_)) + } + pub fn is_json(&self) -> bool { matches!( self, diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index a970329383ec..b14febbdc8c3 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -373,6 +373,7 @@ pub enum ScalarType { JsonList, UUID, Bytes, + Param, } impl fmt::Display for ScalarType { @@ -390,6 +391,7 @@ impl fmt::Display for ScalarType { ScalarType::UUID => "UUID", ScalarType::JsonList => "Json", ScalarType::Bytes => "Bytes", + ScalarType::Param => "Param", }; f.write_str(typ) From 12a506f7fbe1b766af0ad896592a690471ad620f Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 12:35:03 +0100 Subject: [PATCH 29/65] Add comment --- query-engine/query-engine/examples/compiler.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 0c36b89877f8..b725d4bc5363 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -15,6 +15,11 @@ pub fn main() -> anyhow::Result<()> { let schema = Arc::new(schema); let query_schema = Arc::new(query_core::schema::build(schema, true)); + // prisma.user.findMany({ + // where: { + // email: Prisma.Param("userEmail") + // } + // }) let request = RequestBody::Json(JsonBody::Single(JsonSingleQuery { model_name: Some("User".into()), action: Action::new(QueryTag::FindMany), From 721ced95cff7819c3c8bd683892222c59bfdcfb7 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 12:35:22 +0100 Subject: [PATCH 30/65] [integration] From 323a23214416c31095a7849064d16213b0abdc26 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 13:08:20 +0100 Subject: [PATCH 31/65] disable more react-native --- .github/workflows/test-compilation.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git 
a/.github/workflows/test-compilation.yml b/.github/workflows/test-compilation.yml index 3db71c67b5e7..193fdc26da96 100644 --- a/.github/workflows/test-compilation.yml +++ b/.github/workflows/test-compilation.yml @@ -50,9 +50,9 @@ jobs: - name: "Check that Cargo.lock did not change" run: "git diff --exit-code" - test-react-native-compilation: - name: React Native - uses: ./.github/workflows/build-engines-react-native-template.yml - with: - commit: ${{ github.sha }} - uploadArtifacts: false + # test-react-native-compilation: + # name: React Native + # uses: ./.github/workflows/build-engines-react-native-template.yml + # with: + # commit: ${{ github.sha }} + # uploadArtifacts: false From 8c78d8d0c3ad9bf14c11804154b67a37139e4088 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 13:08:37 +0100 Subject: [PATCH 32/65] check param in query schema in validation --- query-engine/core/src/query_document/parser.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/query-engine/core/src/query_document/parser.rs b/query-engine/core/src/query_document/parser.rs index 6ef92807cbf6..85ee16e27df7 100644 --- a/query-engine/core/src/query_document/parser.rs +++ b/query-engine/core/src/query_document/parser.rs @@ -232,6 +232,7 @@ impl QueryDocumentParser { possible_input_types: &[InputType<'a>], query_schema: &'a QuerySchema, ) -> QueryParserResult> { + // TODO: we disabled generating Param explicitly in the query schema for now if let ArgumentValue::Scalar(pv @ PrismaValue::Placeholder { .. }) = &value { return Ok(ParsedInputValue::Single(pv.clone())); } @@ -415,6 +416,8 @@ impl QueryDocumentParser { // UUID coercion matchers (PrismaValue::Uuid(uuid), ScalarType::String) => Ok(PrismaValue::String(uuid.to_string())), + (pv @ PrismaValue::Placeholder { .. }, ScalarType::Param) => Ok(pv), + // All other combinations are value type mismatches. 
(_, _) => Err(ValidationError::invalid_argument_type( selection_path.segments(), From f7783217c2bd2430451148bfb04eb97e37753b18 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 14:53:41 +0100 Subject: [PATCH 33/65] switch to sqlite visitor --- query-engine/core/src/compiler/translate/query.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index fe6eae207e3c..37bcacb7bc19 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -30,7 +30,7 @@ pub(crate) fn translate_query(query: Query) -> TranslateResult { } fn build_db_query<'a>(query: impl Into>) -> TranslateResult { - let (sql, params) = quaint::visitor::Postgres::build(query)?; + let (sql, params) = quaint::visitor::Sqlite::build(query)?; let params = params .into_iter() .map(convert::quaint_value_to_prisma_value) From 2d54430f913467fa2794cbfcb2e34f233fd3af85 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 28 Oct 2024 14:53:47 +0100 Subject: [PATCH 34/65] [integration] From cfb698a47c186ee10ef474c034fd2d5cb86e2d9c Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 19 Dec 2024 19:05:11 +0100 Subject: [PATCH 35/65] add nested query in example --- .../core/src/compiler/translate/query/read.rs | 1 + .../query-engine/examples/compiler.rs | 56 +++++++++---------- 2 files changed, 29 insertions(+), 28 deletions(-) diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 076d4379566a..dbe8f16730a8 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -32,6 +32,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ReadQuery::ManyRecordsQuery(mrq) => { let selected_fields = 
mrq.selected_fields.without_relations().into_virtuals_last(); let needs_reversed_order = mrq.args.needs_reversed_order(); + dbg!(mrq.nested); // TODO: we ignore chunking for now let query = query_builder::read::get_records( diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index b725d4bc5363..7a1150cc3651 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -1,8 +1,8 @@ use std::sync::Arc; -use indexmap::IndexMap; -use query_core::{query_graph_builder::QueryGraphBuilder, schema::QueryTag, QueryDocument}; -use request_handlers::{Action, FieldQuery, JsonBody, JsonSingleQuery, RequestBody, SelectionSet, SelectionSetValue}; +use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; +use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; +use serde_json::json; pub fn main() -> anyhow::Result<()> { let schema_string = include_str!("./schema.prisma"); @@ -20,31 +20,31 @@ pub fn main() -> anyhow::Result<()> { // email: Prisma.Param("userEmail") // } // }) - let request = RequestBody::Json(JsonBody::Single(JsonSingleQuery { - model_name: Some("User".into()), - action: Action::new(QueryTag::FindMany), - query: FieldQuery { - arguments: Some({ - let mut map = IndexMap::new(); - map.insert( - "where".into(), - serde_json::json!({ - "email": { - "$type": "Param", - "value": "userEmail", - } - }), - ); - map - }), - selection: SelectionSet::new({ - let mut map = IndexMap::new(); - map.insert("$scalars".into(), SelectionSetValue::Shorthand(true)); - map - }), - }, - })); - + let query: JsonSingleQuery = serde_json::from_value(json!({ + "modelName": "User", + "action": "findMany", + "query": { + "arguments": { + "where": { + "email": { + "$type": "Param", + "value": "userEmail" + } + } + }, + "selection": { + "$scalars": true, + "posts": { + "arguments": {}, + "selection": { + "$scalars": true + } + } + } + } + }))?; + + let request = 
RequestBody::Json(JsonBody::Single(query)); let doc = request.into_doc(&query_schema)?; let QueryDocument::Single(query) = doc else { From 89b40cbfee1d55bb3919157eebc1981a8dbc6664 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 23 Dec 2024 12:50:48 +0100 Subject: [PATCH 36/65] Use lld via .cargo/config.toml and not env vars --- .envrc | 8 ++++++++ shell.nix | 6 ++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.envrc b/.envrc index 5436426aa834..ed5ae2b6d10b 100644 --- a/.envrc +++ b/.envrc @@ -53,3 +53,11 @@ if command -v nix &> /dev/null && [ -z ${DISABLE_NIX+x} ] then use nix fi + +if [[ "$OSTYPE" == "linux-gnu"* ]] && command -v lld &> /dev/null && [ ! -f .cargo/config.toml ]; then + mkdir -p .cargo + cat << EOF > .cargo/config.toml +[target.$(uname -m)-unknown-linux-gnu] +rustflags = ["-C", "link-arg=-fuse-ld=lld"] +EOF +fi diff --git a/shell.nix b/shell.nix index 309f6275660f..bf6c03cc3716 100644 --- a/shell.nix +++ b/shell.nix @@ -45,7 +45,9 @@ pkgs.mkShell { useLld = "-C link-arg=-fuse-ld=lld"; in pkgs.lib.optionalString pkgs.stdenv.isLinux '' - export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="${useLld}" - export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS="${useLld}" + if [ ! 
-f .cargo/config.toml ]; then + export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="${useLld}" + export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS="${useLld}" + fi ''; } From 58f35a7b892c3cdb18062bd0e01abd90e2a6757c Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 23 Dec 2024 16:25:37 +0100 Subject: [PATCH 37/65] use postgres --- query-engine/core/src/compiler/translate/query.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index 37bcacb7bc19..f3ff82c95298 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -18,8 +18,11 @@ use crate::{ use super::TranslateResult; pub(crate) fn translate_query(query: Query) -> TranslateResult { - let connection_info = - ConnectionInfo::External(ExternalConnectionInfo::new(SqlFamily::Sqlite, "main".to_owned(), None)); + let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( + SqlFamily::Postgres, + "public".to_owned(), + None, + )); let ctx = Context::new(&connection_info, None); @@ -30,7 +33,7 @@ pub(crate) fn translate_query(query: Query) -> TranslateResult { } fn build_db_query<'a>(query: impl Into>) -> TranslateResult { - let (sql, params) = quaint::visitor::Sqlite::build(query)?; + let (sql, params) = quaint::visitor::Postgres::build(query)?; let params = params .into_iter() .map(convert::quaint_value_to_prisma_value) From da96b5d9419295f569778d6bb113c9d86a0751b2 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 23 Dec 2024 16:46:26 +0100 Subject: [PATCH 38/65] remove dbg --- .../schema.prisma | 26 +++++++++---------- .../core/src/compiler/translate/query/read.rs | 1 - 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma 
b/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma index 35fb65826c70..4ec44dc9b33f 100644 --- a/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma +++ b/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma @@ -1,13 +1,13 @@ -generator client { - provider = "prisma-client-js" -} - -datasource db { - provider = "mongodb" - url = env("DATABASE_URL") -} - -type Kattbjorn { - name String - friend Animal -} +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "mongodb" + url = env("DATABASE_URL") +} + +type Kattbjorn { + name String + friend Animal +} diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index dbe8f16730a8..076d4379566a 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -32,7 +32,6 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ReadQuery::ManyRecordsQuery(mrq) => { let selected_fields = mrq.selected_fields.without_relations().into_virtuals_last(); let needs_reversed_order = mrq.args.needs_reversed_order(); - dbg!(mrq.nested); // TODO: we ignore chunking for now let query = query_builder::read::get_records( From 18a7faf30e8151ef779be20864f1a05c5def0689 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 26 Dec 2024 18:58:04 +0100 Subject: [PATCH 39/65] first pass at read nested --- libs/prisma-value/src/lib.rs | 4 + query-engine/core/src/compiler/expression.rs | 58 ++++++++- .../core/src/compiler/translate/query/read.rs | 118 ++++++++++++++++-- query-engine/core/src/query_ast/read.rs | 16 +++ .../query-engine/examples/compiler.rs | 10 +- 5 files changed, 194 insertions(+), 12 deletions(-) diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index 01a4e5e50572..81adccca2661 100644 
--- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -381,6 +381,10 @@ impl PrismaValue { PrismaValue::DateTime(parse_datetime(datetime).unwrap()) } + pub fn placeholder(name: String, r#type: PlaceholderType) -> PrismaValue { + PrismaValue::Placeholder { name, r#type } + } + pub fn as_boolean(&self) -> Option<&bool> { match self { PrismaValue::Boolean(bool) => Some(bool), diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 26e6e066be55..5669e82a695b 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -1,3 +1,4 @@ +use itertools::Itertools; use query_structure::PrismaValue; use serde::Serialize; @@ -32,7 +33,13 @@ impl DbQuery { } #[derive(Debug, Serialize)] -#[serde(tag = "type", content = "args")] +pub struct JoinExpression { + pub child: Expression, + pub on: Vec<(String, String)>, +} + +#[derive(Debug, Serialize)] +#[serde(tag = "type", content = "args", rename_all = "camelCase")] pub enum Expression { /// Sequence of statements. The whole sequence evaluates to the result of the last expression. Seq(Vec), @@ -63,6 +70,22 @@ pub enum Expression { /// Concatenates a list of lists. Concat(Vec), + + /// Asserts that the result of the expression is at most one record. + Unique(Box), + + /// Asserts that the result of the expression is at least one record. + Required(Box), + + /// Application-level join. + Join { + parent: Box, + children: Vec, + }, + + /// Get a field from a record or records. If the argument is a list of records, + /// returns a list of values of this field. 
+ MapField { field: String, records: Box }, } impl Expression { @@ -114,6 +137,37 @@ impl Expression { Self::Sum(exprs) => self.display_function("sum", exprs, f, level)?, Self::Concat(exprs) => self.display_function("concat", exprs, f, level)?, + + Self::Unique(expr) => { + writeln!(f, "{indent}unique (")?; + expr.display(f, level + 1)?; + write!(f, "{indent})")?; + } + + Self::Required(expr) => { + writeln!(f, "{indent}required (")?; + expr.display(f, level + 1)?; + write!(f, "{indent})")?; + } + + Self::Join { parent, children } => { + writeln!(f, "{indent}join (")?; + parent.display(f, level + 1)?; + for nested in children { + let left = nested.on.iter().map(|(l, _)| l).cloned().join(", "); + let right = nested.on.iter().map(|(_, r)| r).cloned().join(", "); + writeln!(f, "\n{indent} with (")?; + nested.child.display(f, level + 2)?; + writeln!(f, "\n{indent} ) on left.{left} = right.{right},")?; + } + write!(f, "{indent})")?; + } + + Self::MapField { field, records } => { + writeln!(f, "{indent}mapField {field} (")?; + records.display(f, level + 1)?; + write!(f, "\n{indent})")?; + } } Ok(()) @@ -128,7 +182,7 @@ impl Expression { ) -> std::fmt::Result { let indent = " ".repeat(level); let DbQuery { query, params } = db_query; - write!(f, "{indent}{op} {{\n{indent} {query}\n{indent}}} with {params:?}") + write!(f, "{indent}{op} (\n{indent} {query}\n{indent}) with {params:?}") } fn display_function( diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 076d4379566a..6a27080d7d07 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -1,16 +1,30 @@ -use query_structure::ModelProjection; +use std::collections::HashSet; + +use itertools::Itertools; +use query_structure::{ + ConditionValue, Filter, ModelProjection, PlaceholderType, PrismaValue, QueryMode, RelationField, ScalarCondition, + ScalarField, ScalarFilter, 
ScalarProjection, +}; use sql_query_connector::{ context::Context, model_extensions::AsColumns, query_arguments_ext::QueryArgumentsExt, query_builder, }; use crate::{ - compiler::{expression::Expression, translate::TranslateResult}, - ReadQuery, RelatedRecordsQuery, + compiler::{ + expression::{Binding, Expression, JoinExpression}, + translate::TranslateResult, + }, + FilteredQuery, ReadQuery, RelatedRecordsQuery, }; use super::build_db_query; pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> TranslateResult { + let all_linking_fields = query + .nested_related_records_queries() + .flat_map(|rrq| rrq.parent_field.linking_fields()) + .collect::>(); + Ok(match query { ReadQuery::RecordQuery(rq) => { let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); @@ -26,7 +40,66 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ) .limit(1); - Expression::Query(build_db_query(query)?) + let expr = Expression::Query(build_db_query(query)?); + + if rq.nested.is_empty() { + return Ok(expr); + } + + Expression::Let { + bindings: vec![Binding { + name: "@parent".into(), + expr, + }], + expr: Box::new(Expression::Let { + bindings: all_linking_fields + .into_iter() + .map(|sf| Binding { + name: format!("@parent.{}", sf.prisma_name().into_owned()), + expr: Expression::MapField { + field: sf.prisma_name().into_owned(), + records: Box::new(Expression::Get { name: "@parent".into() }), + }, + }) + .collect(), + expr: Box::new(Expression::Join { + parent: Box::new(Expression::Get { name: "@parent".into() }), + children: rq + .nested + .into_iter() + .filter_map(|nested| match nested { + ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), + _ => None, + }) + .map(|rrq| -> TranslateResult { + let parent_fields = rrq.parent_field.linking_fields(); + let child_fields = rrq.parent_field.related_field().linking_fields(); + + let join_expr = parent_fields + .scalars() + .zip(child_fields.scalars()) + .map(|(left, 
right)| (left.name().to_owned(), right.name().to_owned())) + .collect_vec(); + + // nested.add_filter(Filter::Scalar(ScalarFilter { + // mode: QueryMode::Default, + // condition: ScalarCondition::Equals(ConditionValue::value(PrismaValue::placeholder( + // "parent_id".into(), + // PlaceholderType::String, + // ))), + // projection: ScalarProjection::Compound(referenced_fields), + // })); + let child_query = translate_read_query(ReadQuery::RelatedRecordsQuery(rrq), ctx)?; + + Ok(JoinExpression { + child: child_query, + on: join_expr, + }) + }) + .try_collect()?, + }), + }), + } } ReadQuery::ManyRecordsQuery(mrq) => { @@ -61,7 +134,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans } } - _ => unimplemented!(), + _ => todo!(), }) } @@ -69,6 +142,37 @@ fn build_read_m2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> Tran todo!() } -fn build_read_one2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> TranslateResult { - todo!() +fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { + let selected_fields = rrq.selected_fields.without_relations().into_virtuals_last(); + let needs_reversed_order = rrq.args.needs_reversed_order(); + + // TODO: we ignore chunking for now + let query = query_builder::read::get_records( + &rrq.parent_field.related_model(), + ModelProjection::from(&selected_fields) + .as_columns(ctx) + .mark_all_selected(), + selected_fields.virtuals(), + rrq.args, + ctx, + ); + + let expr = Expression::Query(build_db_query(query)?); + + if needs_reversed_order { + Ok(Expression::Reverse(Box::new(expr))) + } else { + Ok(expr) + } +} + +fn collect_referenced_fields(nested_queries: &[ReadQuery]) -> HashSet { + nested_queries + .iter() + .filter_map(|rq| match rq { + ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), + _ => None, + }) + .flat_map(|rrq| rrq.parent_field.referenced_fields()) + .collect() } diff --git a/query-engine/core/src/query_ast/read.rs 
b/query-engine/core/src/query_ast/read.rs index e3eca8c88ee5..6d25f8f435ff 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -64,6 +64,22 @@ impl ReadQuery { ReadQuery::AggregateRecordsQuery(_) => false, } } + + fn nested(&self) -> &[ReadQuery] { + match self { + ReadQuery::RecordQuery(x) => &x.nested, + ReadQuery::ManyRecordsQuery(x) => &x.nested, + ReadQuery::RelatedRecordsQuery(x) => &x.nested, + ReadQuery::AggregateRecordsQuery(_) => &[], + } + } + + pub fn nested_related_records_queries(&self) -> impl Iterator + '_ { + self.nested().iter().filter_map(|q| match q { + ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), + _ => None, + }) + } } impl FilteredQuery for ReadQuery { diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 7a1150cc3651..950fd99f255e 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -15,14 +15,18 @@ pub fn main() -> anyhow::Result<()> { let schema = Arc::new(schema); let query_schema = Arc::new(query_core::schema::build(schema, true)); - // prisma.user.findMany({ + // prisma.user.findUnique({ // where: { // email: Prisma.Param("userEmail") + // }, + // select: { + // val: true, + // posts: true, // } // }) let query: JsonSingleQuery = serde_json::from_value(json!({ "modelName": "User", - "action": "findMany", + "action": "findUnique", "query": { "arguments": { "where": { @@ -33,7 +37,7 @@ pub fn main() -> anyhow::Result<()> { } }, "selection": { - "$scalars": true, + "val": true, "posts": { "arguments": {}, "selection": { From ec1a30497e7b1afe4476d157d7e161779bae378f Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 11 Jan 2025 16:20:13 +0100 Subject: [PATCH 40/65] Use `pretty` for pretty printing --- Cargo.lock | 19 ++ query-engine/core/Cargo.toml | 1 + query-engine/core/src/compiler/expression.rs | 298 +++++++++++------- 
.../query-engine/examples/compiler.rs | 9 +- .../query-engine/examples/schema.prisma | 16 +- 5 files changed, 229 insertions(+), 114 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5f104e97bb5c..cfd52bd42c57 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3419,6 +3419,18 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "pretty" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55c4d17d994b637e2f4daf6e5dc5d660d209d5642377d675d7a1c3ab69fa579" +dependencies = [ + "arrayvec 0.5.2", + "termcolor", + "typed-arena", + "unicode-width", +] + [[package]] name = "pretty-hex" version = "0.3.0" @@ -3769,6 +3781,7 @@ dependencies = [ "lru 0.7.8", "once_cell", "petgraph", + "pretty", "prisma-metrics", "psl", "quaint", @@ -5908,6 +5921,12 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "typed-arena" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" + [[package]] name = "typed-builder" version = "0.10.0" diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index d8f97ec2cec1..a6aa37e1f63a 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -23,6 +23,7 @@ petgraph = "0.4" query-structure = { path = "../query-structure", features = [ "default_generators", ] } +pretty = { version = "0.12", features = ["termcolor"] } prisma-metrics = { path = "../../libs/metrics", optional = true } serde.workspace = true serde_json.workspace = true diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 5669e82a695b..0cb342be9881 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -1,4 +1,7 @@ -use itertools::Itertools; +use 
pretty::{ + termcolor::{Color, ColorSpec}, + DocAllocator, DocBuilder, +}; use query_structure::PrismaValue; use serde::Serialize; @@ -88,122 +91,201 @@ pub enum Expression { MapField { field: String, records: Box }, } -impl Expression { - fn display(&self, f: &mut std::fmt::Formatter<'_>, level: usize) -> std::fmt::Result { - let indent = " ".repeat(level); - - match self { - Self::Seq(exprs) => { - writeln!(f, "{indent}{{")?; - for expr in exprs { - expr.display(f, level + 1)?; - writeln!(f, ";")?; - } - write!(f, "{indent}}}")?; - } - - Self::Get { name } => { - write!(f, "{indent}get {name}")?; - } - - Self::Let { bindings, expr } => { - writeln!(f, "{indent}let")?; - for Binding { name, expr } in bindings { - writeln!(f, "{indent} {name} =")?; - expr.display(f, level + 2)?; - writeln!(f, ";")?; - } - writeln!(f, "{indent}in")?; - expr.display(f, level + 1)?; - } - - Self::GetFirstNonEmpty { names } => { - write!(f, "{indent}getFirstNonEmpty")?; - for name in names { - write!(f, " {}", name)?; - } - } - - Self::Query(query) => self.display_query("query", query, f, level)?, - - Self::Execute(query) => self.display_query("execute", query, f, level)?, - - Self::Reverse(expr) => { - writeln!(f, "{indent}reverse (")?; - expr.display(f, level + 1)?; - write!(f, "{indent})")?; - } - - Self::Sum(exprs) => self.display_function("sum", exprs, f, level)?, - - Self::Concat(exprs) => self.display_function("concat", exprs, f, level)?, - - Self::Unique(expr) => { - writeln!(f, "{indent}unique (")?; - expr.display(f, level + 1)?; - write!(f, "{indent})")?; - } - - Self::Required(expr) => { - writeln!(f, "{indent}required (")?; - expr.display(f, level + 1)?; - write!(f, "{indent})")?; - } - - Self::Join { parent, children } => { - writeln!(f, "{indent}join (")?; - parent.display(f, level + 1)?; - for nested in children { - let left = nested.on.iter().map(|(l, _)| l).cloned().join(", "); - let right = nested.on.iter().map(|(_, r)| r).cloned().join(", "); - writeln!(f, 
"\n{indent} with (")?; - nested.child.display(f, level + 2)?; - writeln!(f, "\n{indent} ) on left.{left} = right.{right},")?; - } - write!(f, "{indent})")?; - } - - Self::MapField { field, records } => { - writeln!(f, "{indent}mapField {field} (")?; - records.display(f, level + 1)?; - write!(f, "\n{indent})")?; - } - } +#[derive(thiserror::Error, Debug)] +pub enum PrettyPrintError { + #[error("{0}")] + IoError(#[from] std::io::Error), + #[error("{0}")] + FromUtf8Error(#[from] std::string::FromUtf8Error), +} - Ok(()) +impl Expression { + pub fn pretty_print(&self, color: bool, width: usize) -> Result { + let arena = pretty::Arena::new(); + let doc = self.to_doc(&arena); + + let mut buf = if color { + pretty::termcolor::Buffer::ansi() + } else { + pretty::termcolor::Buffer::no_color() + }; + + doc.render_colored(width, &mut buf)?; + Ok(String::from_utf8(buf.into_inner())?) } - fn display_query( - &self, - op: &str, - db_query: &DbQuery, - f: &mut std::fmt::Formatter<'_>, - level: usize, - ) -> std::fmt::Result { - let indent = " ".repeat(level); - let DbQuery { query, params } = db_query; - write!(f, "{indent}{op} (\n{indent} {query}\n{indent}) with {params:?}") - } + fn to_doc<'a, D>(&'a self, d: &'a D) -> DocBuilder<'a, D, ColorSpec> + where + D: DocAllocator<'a, ColorSpec>, + D::Doc: Clone, + { + let color_kw = || ColorSpec::new().set_fg(Some(Color::Blue)).clone(); + let color_fn = || ColorSpec::new().set_underline(true).clone(); + let color_var = || ColorSpec::new().set_bold(true).clone(); + let color_lit = || ColorSpec::new().set_italic(true).set_fg(Some(Color::Green)).clone(); + + let format_query = |tag: &'static str, db_query: &'a DbQuery| { + d.text(tag) + .annotate(color_kw()) + .append(d.softline()) + .append( + d.reflow(&db_query.query) + .align() + .enclose("«", "»") + .annotate(color_lit()), + ) + .append(d.line()) + .append(d.text("with params").annotate(color_kw())) + .append(d.space()) + .append( + d.intersperse( + db_query.params.iter().map(|param| 
match param { + PrismaValue::Placeholder { name, r#type } => d.text("var").annotate(color_kw()).append( + d.text(name) + .annotate(color_var()) + .append(d.space()) + .append(d.text("as").annotate(color_kw())) + .append(d.space()) + .append(match r#type { + query_structure::PlaceholderType::Array(inner) => format!("{inner:?}[]"), + _ => format!("{type:?}"), + }) + .parens(), + ), + _ => d + .text("const") + .annotate(color_kw()) + .append(d.text(format!("{param:?}")).annotate(color_lit()).parens()), + }), + d.text(",").append(d.softline()), + ) + .align() + .brackets(), + ) + .align() + }; + + let format_function = |name: &'static str, args: &'a [Expression]| { + d.text(name).annotate(color_fn()).append(d.space()).append( + d.intersperse(args.iter().map(|expr| expr.to_doc(d)), d.space()) + .parens(), + ) + }; + + let format_unary_function = |name: &'static str, arg: &'a Expression| { + d.text(name) + .annotate(color_fn()) + .append(d.space()) + .append(arg.to_doc(d).parens()) + }; - fn display_function( - &self, - name: &str, - args: &[Expression], - f: &mut std::fmt::Formatter<'_>, - level: usize, - ) -> std::fmt::Result { - let indent = " ".repeat(level); - write!(f, "{indent}{name} (")?; - for arg in args { - arg.display(f, level + 1)?; - writeln!(f, ",")?; + match self { + Expression::Seq(vec) => d.intersperse(vec.iter().map(|expr| expr.to_doc(d)), d.text(";").append(d.line())), + + Expression::Get { name } => d + .text("get") + .annotate(color_kw()) + .append(d.space()) + .append(d.text(name).annotate(color_var())), + + Expression::Let { bindings, expr } => d + .text("let") + .annotate(color_kw()) + .append(d.softline()) + .append( + d.intersperse( + bindings.iter().map(|binding| { + d.text(&binding.name) + .annotate(color_var()) + .append(d.space()) + .append("=") + .append(d.softline()) + .append(binding.expr.to_doc(d)) + }), + d.text(";").append(d.line()), + ) + .align(), + ) + .append(d.line()) + .append(d.text("in").annotate(color_kw())) + 
.append(d.softline()) + .append(expr.to_doc(d).align()), + + Expression::GetFirstNonEmpty { names } => d + .text("getFirstNonEmpty") + .annotate(color_fn()) + .append(d.intersperse(names.iter().map(|name| d.text(name).annotate(color_var())), d.space())), + + Expression::Query(db_query) => format_query("query", db_query), + + Expression::Execute(db_query) => format_query("execute", db_query), + + Expression::Reverse(expression) => format_unary_function("reverse", expression), + + Expression::Sum(vec) => format_function("sum", vec), + + Expression::Concat(vec) => format_function("concat", vec), + + Expression::Unique(expression) => format_unary_function("unique", expression), + + Expression::Required(expression) => format_unary_function("required", expression), + + Expression::Join { parent, children } => d + .text("join") + .annotate(color_kw()) + .append(d.space()) + .append(parent.to_doc(d).parens()) + .append(d.line()) + .append(d.text("with").annotate(color_kw())) + .append(d.space()) + .append( + d.intersperse( + children.iter().map(|join| { + join.child + .to_doc(d) + .parens() + .append(d.space()) + .append(d.text("on").annotate(color_kw())) + .append(d.space()) + .append(d.intersperse( + join.on.iter().map(|(l, r)| { + d.text("left") + .annotate(color_kw()) + .append(".") + .append(d.text(l).annotate(color_var())) + .parens() + .append(d.space()) + .append("=") + .append(d.space()) + .append( + d.text("right") + .annotate(color_kw()) + .append(".") + .append(d.text(r).annotate(color_var())) + .parens(), + ) + }), + d.text(", "), + )) + }), + d.text(",").append(d.line()), + ) + .align(), + ), + + Expression::MapField { field, records } => d + .text("mapField") + .annotate(color_fn()) + .append(d.space()) + .append(d.text(field).double_quotes().annotate(color_lit())) + .append(d.space()) + .append(records.to_doc(d).parens()), } - write!(f, ")") } } impl std::fmt::Display for Expression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - 
self.display(f, 0) + self.pretty_print(false, 80).map_err(|_| std::fmt::Error)?.fmt(f) } } diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 950fd99f255e..6292910ea3d7 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -22,6 +22,7 @@ pub fn main() -> anyhow::Result<()> { // select: { // val: true, // posts: true, + // profile: true, // } // }) let query: JsonSingleQuery = serde_json::from_value(json!({ @@ -43,6 +44,12 @@ pub fn main() -> anyhow::Result<()> { "selection": { "$scalars": true } + }, + "profile": { + "arguments": {}, + "selection": { + "$scalars": true + } } } } @@ -61,7 +68,7 @@ pub fn main() -> anyhow::Result<()> { let expr = query_core::compiler::translate(graph)?; - println!("{expr}"); + println!("{}", expr.pretty_print(true, 80)?); Ok(()) } diff --git a/query-engine/query-engine/examples/schema.prisma b/query-engine/query-engine/examples/schema.prisma index ab9cd218da49..ff51ae3ddfe0 100644 --- a/query-engine/query-engine/examples/schema.prisma +++ b/query-engine/query-engine/examples/schema.prisma @@ -8,11 +8,17 @@ datasource db { } model User { - id String @id @default(cuid()) - email String @unique - name String? - posts Post[] - val Int? + id String @id @default(cuid()) + email String @unique + name String? + posts Post[] + val Int? + profile Profile? 
+} + +model Profile { + userId String @id + user User @relation(fields: [userId], references: [id]) } model Post { From 8e7a29c824eb25b462782b40e8634655605c48ce Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 11 Jan 2025 20:51:40 +0100 Subject: [PATCH 41/65] refactor formatting --- query-engine/core/src/compiler/expression.rs | 178 +------------ .../core/src/compiler/expression/format.rs | 244 ++++++++++++++++++ 2 files changed, 248 insertions(+), 174 deletions(-) create mode 100644 query-engine/core/src/compiler/expression/format.rs diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 0cb342be9881..4bca1d7b796a 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -1,10 +1,8 @@ -use pretty::{ - termcolor::{Color, ColorSpec}, - DocAllocator, DocBuilder, -}; use query_structure::PrismaValue; use serde::Serialize; +mod format; + #[derive(Debug, Serialize)] pub struct Binding { pub name: String, @@ -102,7 +100,8 @@ pub enum PrettyPrintError { impl Expression { pub fn pretty_print(&self, color: bool, width: usize) -> Result { let arena = pretty::Arena::new(); - let doc = self.to_doc(&arena); + let builder = format::PrettyPrinter::new(&arena); + let doc = builder.expression(self); let mut buf = if color { pretty::termcolor::Buffer::ansi() @@ -113,175 +112,6 @@ impl Expression { doc.render_colored(width, &mut buf)?; Ok(String::from_utf8(buf.into_inner())?) 
} - - fn to_doc<'a, D>(&'a self, d: &'a D) -> DocBuilder<'a, D, ColorSpec> - where - D: DocAllocator<'a, ColorSpec>, - D::Doc: Clone, - { - let color_kw = || ColorSpec::new().set_fg(Some(Color::Blue)).clone(); - let color_fn = || ColorSpec::new().set_underline(true).clone(); - let color_var = || ColorSpec::new().set_bold(true).clone(); - let color_lit = || ColorSpec::new().set_italic(true).set_fg(Some(Color::Green)).clone(); - - let format_query = |tag: &'static str, db_query: &'a DbQuery| { - d.text(tag) - .annotate(color_kw()) - .append(d.softline()) - .append( - d.reflow(&db_query.query) - .align() - .enclose("«", "»") - .annotate(color_lit()), - ) - .append(d.line()) - .append(d.text("with params").annotate(color_kw())) - .append(d.space()) - .append( - d.intersperse( - db_query.params.iter().map(|param| match param { - PrismaValue::Placeholder { name, r#type } => d.text("var").annotate(color_kw()).append( - d.text(name) - .annotate(color_var()) - .append(d.space()) - .append(d.text("as").annotate(color_kw())) - .append(d.space()) - .append(match r#type { - query_structure::PlaceholderType::Array(inner) => format!("{inner:?}[]"), - _ => format!("{type:?}"), - }) - .parens(), - ), - _ => d - .text("const") - .annotate(color_kw()) - .append(d.text(format!("{param:?}")).annotate(color_lit()).parens()), - }), - d.text(",").append(d.softline()), - ) - .align() - .brackets(), - ) - .align() - }; - - let format_function = |name: &'static str, args: &'a [Expression]| { - d.text(name).annotate(color_fn()).append(d.space()).append( - d.intersperse(args.iter().map(|expr| expr.to_doc(d)), d.space()) - .parens(), - ) - }; - - let format_unary_function = |name: &'static str, arg: &'a Expression| { - d.text(name) - .annotate(color_fn()) - .append(d.space()) - .append(arg.to_doc(d).parens()) - }; - - match self { - Expression::Seq(vec) => d.intersperse(vec.iter().map(|expr| expr.to_doc(d)), d.text(";").append(d.line())), - - Expression::Get { name } => d - .text("get") - 
.annotate(color_kw()) - .append(d.space()) - .append(d.text(name).annotate(color_var())), - - Expression::Let { bindings, expr } => d - .text("let") - .annotate(color_kw()) - .append(d.softline()) - .append( - d.intersperse( - bindings.iter().map(|binding| { - d.text(&binding.name) - .annotate(color_var()) - .append(d.space()) - .append("=") - .append(d.softline()) - .append(binding.expr.to_doc(d)) - }), - d.text(";").append(d.line()), - ) - .align(), - ) - .append(d.line()) - .append(d.text("in").annotate(color_kw())) - .append(d.softline()) - .append(expr.to_doc(d).align()), - - Expression::GetFirstNonEmpty { names } => d - .text("getFirstNonEmpty") - .annotate(color_fn()) - .append(d.intersperse(names.iter().map(|name| d.text(name).annotate(color_var())), d.space())), - - Expression::Query(db_query) => format_query("query", db_query), - - Expression::Execute(db_query) => format_query("execute", db_query), - - Expression::Reverse(expression) => format_unary_function("reverse", expression), - - Expression::Sum(vec) => format_function("sum", vec), - - Expression::Concat(vec) => format_function("concat", vec), - - Expression::Unique(expression) => format_unary_function("unique", expression), - - Expression::Required(expression) => format_unary_function("required", expression), - - Expression::Join { parent, children } => d - .text("join") - .annotate(color_kw()) - .append(d.space()) - .append(parent.to_doc(d).parens()) - .append(d.line()) - .append(d.text("with").annotate(color_kw())) - .append(d.space()) - .append( - d.intersperse( - children.iter().map(|join| { - join.child - .to_doc(d) - .parens() - .append(d.space()) - .append(d.text("on").annotate(color_kw())) - .append(d.space()) - .append(d.intersperse( - join.on.iter().map(|(l, r)| { - d.text("left") - .annotate(color_kw()) - .append(".") - .append(d.text(l).annotate(color_var())) - .parens() - .append(d.space()) - .append("=") - .append(d.space()) - .append( - d.text("right") - .annotate(color_kw()) - 
.append(".") - .append(d.text(r).annotate(color_var())) - .parens(), - ) - }), - d.text(", "), - )) - }), - d.text(",").append(d.line()), - ) - .align(), - ), - - Expression::MapField { field, records } => d - .text("mapField") - .annotate(color_fn()) - .append(d.space()) - .append(d.text(field).double_quotes().annotate(color_lit())) - .append(d.space()) - .append(records.to_doc(d).parens()), - } - } } impl std::fmt::Display for Expression { diff --git a/query-engine/core/src/compiler/expression/format.rs b/query-engine/core/src/compiler/expression/format.rs new file mode 100644 index 000000000000..dd1df9643c80 --- /dev/null +++ b/query-engine/core/src/compiler/expression/format.rs @@ -0,0 +1,244 @@ +use pretty::{ + termcolor::{Color, ColorSpec}, + DocAllocator, DocBuilder, +}; +use query_structure::PrismaValue; + +use super::{Binding, DbQuery, Expression, JoinExpression}; + +fn color_kw() -> ColorSpec { + ColorSpec::new().set_fg(Some(Color::Blue)).clone() +} + +fn color_fn() -> ColorSpec { + ColorSpec::new().set_underline(true).clone() +} + +fn color_var() -> ColorSpec { + ColorSpec::new().set_bold(true).clone() +} + +fn color_lit() -> ColorSpec { + ColorSpec::new().set_italic(true).set_fg(Some(Color::Green)).clone() +} + +pub(super) struct PrettyPrinter<'a, D> { + allocator: &'a D, +} + +impl<'a, D> PrettyPrinter<'a, D> +where + D: DocAllocator<'a, ColorSpec>, + D::Doc: Clone, +{ + pub fn new(allocator: &'a D) -> Self { + Self { allocator } + } + + pub fn expression(&'a self, expression: &'a Expression) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + match expression { + Expression::Seq(vec) => self.seq(&vec), + Expression::Get { name } => self.get(&name), + Expression::Let { bindings, expr } => self.r#let(&bindings, &expr), + Expression::GetFirstNonEmpty { names } => self.get_first_non_empty(&names), + Expression::Query(db_query) => self.query("query", &db_query), + Expression::Execute(db_query) => self.query("execute", &db_query), + 
Expression::Reverse(expression) => self.unary_function("reverse", &expression), + Expression::Sum(vec) => self.function("sum", &vec), + Expression::Concat(vec) => self.function("concat", &vec), + Expression::Unique(expression) => self.unary_function("unique", expression), + Expression::Required(expression) => self.unary_function("required", expression), + Expression::Join { parent, children } => self.join(&parent, &children), + Expression::MapField { field, records } => self.map_field(&field, &records), + } + } + + fn keyword(&'a self, keyword: &'static str) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text(keyword).annotate(color_kw()) + } + + fn var_name(&'a self, name: &'a str) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text(name).annotate(color_var()) + } + + fn query(&'a self, tag: &'static str, db_query: &'a DbQuery) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.keyword(tag) + .append(self.softline()) + .append( + self.reflow(&db_query.query) + .align() + .enclose("«", "»") + .annotate(color_lit()), + ) + .append(self.line()) + .append(self.keyword("with params")) + .append(self.space()) + .append(self.list(&db_query.params)) + .align() + } + + fn list(&'a self, values: &'a [PrismaValue]) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.intersperse( + values.iter().map(|value| self.value(value)), + self.text(",").append(self.softline()), + ) + .align() + .brackets() + } + + fn value(&'a self, value: &'a PrismaValue) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + match value { + PrismaValue::Placeholder { name, r#type } => self.keyword("var").append( + self.var_name(name) + .append(self.space()) + .append(self.keyword("as")) + .append(self.space()) + .append(match r#type { + query_structure::PlaceholderType::Array(inner) => format!("{inner:?}[]"), + _ => format!("{type:?}"), + }) + .parens(), + ), + PrismaValue::List(values) => self.list(&values), + _ => self + .keyword("const") + 
.append(self.text(format!("{value:?}")).annotate(color_lit()).parens()), + } + } + + fn function( + &'a self, + name: &'static str, + args: &'a [Expression], + ) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text(name).annotate(color_fn()).append(self.space()).append( + self.intersperse(args.iter().map(|expr| self.expression(expr)), self.space()) + .parens(), + ) + } + + fn unary_function( + &'a self, + name: &'static str, + arg: &'a Expression, + ) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text(name) + .annotate(color_fn()) + .append(self.space()) + .append(self.expression(arg).parens()) + } + + fn seq(&'a self, vec: &'a [Expression]) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.intersperse( + vec.iter().map(|expr| self.expression(expr)), + self.text(";").append(self.line()), + ) + } + + fn get(&'a self, name: &'a str) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.keyword("get").append(self.space()).append(self.var_name(name)) + } + + fn r#let( + &'a self, + bindings: &'a [Binding], + expr: &'a Expression, + ) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.keyword("let") + .append(self.softline()) + .append( + self.intersperse( + bindings.iter().map(|binding| { + self.var_name(&binding.name) + .append(self.space()) + .append("=") + .append(self.softline()) + .append(self.expression(&binding.expr)) + }), + self.text(";").append(self.line()), + ) + .align(), + ) + .append(self.line()) + .append(self.keyword("in")) + .append(self.softline()) + .append(self.expression(expr).align()) + } + + fn get_first_non_empty(&'a self, names: &'a [String]) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text("getFirstNonEmpty") + .annotate(color_fn()) + .append(self.intersperse(names.iter().map(|name| self.var_name(name)), self.space())) + } + + fn join( + &'a self, + parent: &'a Expression, + children: &'a [JoinExpression], + ) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + 
self.keyword("join") + .append(self.space()) + .append(self.expression(parent).parens()) + .append(self.line()) + .append(self.keyword("with")) + .append(self.space()) + .append( + self.intersperse( + children.iter().map(|join| { + self.expression(&join.child) + .parens() + .append(self.space()) + .append(self.keyword("on")) + .append(self.space()) + .append(self.intersperse( + join.on.iter().map(|(l, r)| { + self.keyword("left") + .append(".") + .append(self.text(l)) + .parens() + .append(self.space()) + .append("=") + .append(self.space()) + .append(self.keyword("right").append(".").append(self.text(r)).parens()) + }), + self.text(", "), + )) + }), + self.text(",").append(self.line()), + ) + .align(), + ) + } + + fn map_field(&'a self, field: &'a str, records: &'a Expression) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text("mapField") + .annotate(color_fn()) + .append(self.space()) + .append(self.text(field).double_quotes().annotate(color_lit())) + .append(self.space()) + .append(self.expression(records).parens()) + } +} + +impl<'a, D, A> DocAllocator<'a, A> for PrettyPrinter<'a, D> +where + D: DocAllocator<'a, A>, + A: 'a, +{ + type Doc = D::Doc; + + fn alloc(&'a self, doc: pretty::Doc<'a, Self::Doc, A>) -> Self::Doc { + self.allocator.alloc(doc) + } + + fn alloc_column_fn( + &'a self, + f: impl Fn(usize) -> Self::Doc + 'a, + ) -> >::ColumnFn { + self.allocator.alloc_column_fn(f) + } + + fn alloc_width_fn(&'a self, f: impl Fn(isize) -> Self::Doc + 'a) -> >::WidthFn { + self.allocator.alloc_width_fn(f) + } +} From 740e9962acad75c84ec38397ed5d05d3c2e9236f Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 11 Jan 2025 16:56:36 +0100 Subject: [PATCH 42/65] Add parent field name to join node --- query-engine/core/src/compiler/expression.rs | 2 + .../core/src/compiler/expression/format.rs | 52 ++++-- .../core/src/compiler/translate/query/read.rs | 164 ++++++++++-------- query-engine/core/src/query_ast/read.rs | 16 -- 4 files changed, 129 
insertions(+), 105 deletions(-) diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 4bca1d7b796a..65b5d22bcd05 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -34,9 +34,11 @@ impl DbQuery { } #[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] pub struct JoinExpression { pub child: Expression, pub on: Vec<(String, String)>, + pub parent_field: String, } #[derive(Debug, Serialize)] diff --git a/query-engine/core/src/compiler/expression/format.rs b/query-engine/core/src/compiler/expression/format.rs index dd1df9643c80..2ea2bfe5d97f 100644 --- a/query-engine/core/src/compiler/expression/format.rs +++ b/query-engine/core/src/compiler/expression/format.rs @@ -22,6 +22,10 @@ fn color_lit() -> ColorSpec { ColorSpec::new().set_italic(true).set_fg(Some(Color::Green)).clone() } +fn color_field() -> ColorSpec { + ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)).clone() +} + pub(super) struct PrettyPrinter<'a, D> { allocator: &'a D, } @@ -61,6 +65,19 @@ where self.text(name).annotate(color_var()) } + fn field_name(&'a self, name: &'a str) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.text(name).annotate(color_field()) + } + + fn tuple( + &'a self, + subtrees: impl IntoIterator, ColorSpec>>, + ) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { + self.intersperse(subtrees, self.text(",").append(self.softline())) + .align() + .parens() + } + fn query(&'a self, tag: &'static str, db_query: &'a DbQuery) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { self.keyword(tag) .append(self.softline()) @@ -185,24 +202,31 @@ where .append( self.intersperse( children.iter().map(|join| { + let (left_fields, right_fields): (Vec<_>, Vec<_>) = join + .on + .iter() + .map(|(l, r)| (self.field_name(l), self.field_name(r))) + .unzip(); self.expression(&join.child) .parens() .append(self.space()) .append(self.keyword("on")) 
.append(self.space()) - .append(self.intersperse( - join.on.iter().map(|(l, r)| { - self.keyword("left") - .append(".") - .append(self.text(l)) - .parens() - .append(self.space()) - .append("=") - .append(self.space()) - .append(self.keyword("right").append(".").append(self.text(r)).parens()) - }), - self.text(", "), - )) + .append( + self.keyword("left") + .append(".") + .append(self.tuple(left_fields)) + .append(self.space()) + .append("=") + .append(self.space()) + .append(self.keyword("right")) + .append(".") + .append(self.tuple(right_fields)), + ) + .append(self.space()) + .append(self.keyword("as")) + .append(self.space()) + .append(self.field_name(&join.parent_field)) }), self.text(",").append(self.line()), ) @@ -214,7 +238,7 @@ where self.text("mapField") .annotate(color_fn()) .append(self.space()) - .append(self.text(field).double_quotes().annotate(color_lit())) + .append(self.field_name(field)) .append(self.space()) .append(self.expression(records).parens()) } diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 6a27080d7d07..3fddda418e95 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -20,11 +20,6 @@ use crate::{ use super::build_db_query; pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> TranslateResult { - let all_linking_fields = query - .nested_related_records_queries() - .flat_map(|rrq| rrq.parent_field.linking_fields()) - .collect::>(); - Ok(match query { ReadQuery::RecordQuery(rq) => { let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); @@ -41,64 +36,12 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans .limit(1); let expr = Expression::Query(build_db_query(query)?); + let expr = Expression::Unique(Box::new(expr)); if rq.nested.is_empty() { - return Ok(expr); - } - - Expression::Let { - bindings: 
vec![Binding { - name: "@parent".into(), - expr, - }], - expr: Box::new(Expression::Let { - bindings: all_linking_fields - .into_iter() - .map(|sf| Binding { - name: format!("@parent.{}", sf.prisma_name().into_owned()), - expr: Expression::MapField { - field: sf.prisma_name().into_owned(), - records: Box::new(Expression::Get { name: "@parent".into() }), - }, - }) - .collect(), - expr: Box::new(Expression::Join { - parent: Box::new(Expression::Get { name: "@parent".into() }), - children: rq - .nested - .into_iter() - .filter_map(|nested| match nested { - ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), - _ => None, - }) - .map(|rrq| -> TranslateResult { - let parent_fields = rrq.parent_field.linking_fields(); - let child_fields = rrq.parent_field.related_field().linking_fields(); - - let join_expr = parent_fields - .scalars() - .zip(child_fields.scalars()) - .map(|(left, right)| (left.name().to_owned(), right.name().to_owned())) - .collect_vec(); - - // nested.add_filter(Filter::Scalar(ScalarFilter { - // mode: QueryMode::Default, - // condition: ScalarCondition::Equals(ConditionValue::value(PrismaValue::placeholder( - // "parent_id".into(), - // PlaceholderType::String, - // ))), - // projection: ScalarProjection::Compound(referenced_fields), - // })); - let child_query = translate_read_query(ReadQuery::RelatedRecordsQuery(rrq), ctx)?; - - Ok(JoinExpression { - child: child_query, - on: join_expr, - }) - }) - .try_collect()?, - }), - }), + expr + } else { + add_inmemory_join(expr, rq.nested, ctx)? } } @@ -119,10 +62,16 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans let expr = Expression::Query(build_db_query(query)?); - if needs_reversed_order { + let expr = if needs_reversed_order { Expression::Reverse(Box::new(expr)) } else { expr + }; + + if mrq.nested.is_empty() { + expr + } else { + add_inmemory_join(expr, mrq.nested, ctx)? 
} } @@ -138,6 +87,76 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans }) } +fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<'_>) -> TranslateResult { + let all_linking_fields = nested + .iter() + .flat_map(|nested| match nested { + ReadQuery::RelatedRecordsQuery(rrq) => rrq.parent_field.linking_fields(), + _ => unreachable!(), + }) + .collect::>(); + + let linking_fields_bindings = all_linking_fields + .into_iter() + .map(|sf| Binding { + name: format!("@parent${}", sf.prisma_name().into_owned()), + expr: Expression::MapField { + field: sf.prisma_name().into_owned(), + records: Box::new(Expression::Get { name: "@parent".into() }), + }, + }) + .collect(); + + let join_expressions = nested + .into_iter() + .filter_map(|nested| match nested { + ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), + _ => None, + }) + .map(|rrq| -> TranslateResult { + let parent_field_name = rrq.parent_field.name().to_owned(); + let parent_fields = rrq.parent_field.linking_fields(); + let child_fields = rrq.parent_field.related_field().linking_fields(); + + let join_expr = parent_fields + .scalars() + .zip(child_fields.scalars()) + .map(|(left, right)| (left.name().to_owned(), right.name().to_owned())) + .collect_vec(); + + // nested.add_filter(Filter::Scalar(ScalarFilter { + // mode: QueryMode::Default, + // condition: ScalarCondition::Equals(ConditionValue::value(PrismaValue::placeholder( + // "parent_id".into(), + // PlaceholderType::String, + // ))), + // projection: ScalarProjection::Compound(referenced_fields), + // })); + let child_query = translate_read_query(ReadQuery::RelatedRecordsQuery(rrq), ctx)?; + + Ok(JoinExpression { + child: child_query, + on: join_expr, + parent_field: parent_field_name, + }) + }) + .try_collect()?; + + Ok(Expression::Let { + bindings: vec![Binding { + name: "@parent".into(), + expr: parent, + }], + expr: Box::new(Expression::Let { + bindings: linking_fields_bindings, + expr: 
Box::new(Expression::Join { + parent: Box::new(Expression::Get { name: "@parent".into() }), + children: join_expressions, + }), + }), + }) +} + fn build_read_m2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> TranslateResult { todo!() } @@ -159,20 +178,15 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> Transl let expr = Expression::Query(build_db_query(query)?); - if needs_reversed_order { - Ok(Expression::Reverse(Box::new(expr))) + let expr = if needs_reversed_order { + Expression::Reverse(Box::new(expr)) } else { + expr + }; + + if rrq.nested.is_empty() { Ok(expr) + } else { + add_inmemory_join(expr, rrq.nested, ctx) } } - -fn collect_referenced_fields(nested_queries: &[ReadQuery]) -> HashSet { - nested_queries - .iter() - .filter_map(|rq| match rq { - ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), - _ => None, - }) - .flat_map(|rrq| rrq.parent_field.referenced_fields()) - .collect() -} diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index 6d25f8f435ff..e3eca8c88ee5 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -64,22 +64,6 @@ impl ReadQuery { ReadQuery::AggregateRecordsQuery(_) => false, } } - - fn nested(&self) -> &[ReadQuery] { - match self { - ReadQuery::RecordQuery(x) => &x.nested, - ReadQuery::ManyRecordsQuery(x) => &x.nested, - ReadQuery::RelatedRecordsQuery(x) => &x.nested, - ReadQuery::AggregateRecordsQuery(_) => &[], - } - } - - pub fn nested_related_records_queries(&self) -> impl Iterator + '_ { - self.nested().iter().filter_map(|q| match q { - ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), - _ => None, - }) - } } impl FilteredQuery for ReadQuery { From 96f7233d3de618d23d66f33eed4a23d6a5411166 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 11 Jan 2025 21:55:46 +0100 Subject: [PATCH 43/65] Drop "with" in "with params" --- query-engine/core/src/compiler/expression/format.rs | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/query-engine/core/src/compiler/expression/format.rs b/query-engine/core/src/compiler/expression/format.rs index 2ea2bfe5d97f..8c61747356df 100644 --- a/query-engine/core/src/compiler/expression/format.rs +++ b/query-engine/core/src/compiler/expression/format.rs @@ -88,7 +88,7 @@ where .annotate(color_lit()), ) .append(self.line()) - .append(self.keyword("with params")) + .append(self.keyword("params")) .append(self.space()) .append(self.list(&db_query.params)) .align() From 10a05b7f4ad45c47083b838cc0377dc1e4d3b145 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 11 Jan 2025 23:12:52 +0100 Subject: [PATCH 44/65] Add filters --- query-engine/core/src/compiler/expression.rs | 37 +++++++++++ .../core/src/compiler/translate/query/read.rs | 61 +++++++++++++++---- query-engine/core/src/query_ast/read.rs | 10 +++ query-engine/query-structure/src/field/mod.rs | 18 ++++++ 4 files changed, 115 insertions(+), 11 deletions(-) diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 65b5d22bcd05..1a3a3606bb68 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -91,6 +91,20 @@ pub enum Expression { MapField { field: String, records: Box }, } +#[derive(Debug, Clone)] +pub enum ExpressionType { + Scalar, + Record, + List(Box), + Dynamic, +} + +impl ExpressionType { + pub fn is_list(&self) -> bool { + matches!(self, ExpressionType::List(_) | ExpressionType::Dynamic) + } +} + #[derive(thiserror::Error, Debug)] pub enum PrettyPrintError { #[error("{0}")] @@ -114,6 +128,29 @@ impl Expression { doc.render_colored(width, &mut buf)?; Ok(String::from_utf8(buf.into_inner())?) } + + pub fn r#type(&self) -> ExpressionType { + match self { + Expression::Seq(vec) => vec.iter().last().map_or(ExpressionType::Scalar, Expression::r#type), + Expression::Get { .. } => ExpressionType::Dynamic, + Expression::Let { expr, .. 
} => expr.r#type(), + Expression::GetFirstNonEmpty { .. } => ExpressionType::Dynamic, + Expression::Query(_) => ExpressionType::List(Box::new(ExpressionType::Record)), + Expression::Execute(_) => ExpressionType::Scalar, + Expression::Reverse(expression) => expression.r#type(), + Expression::Sum(_) => ExpressionType::Scalar, + Expression::Concat(vec) => ExpressionType::List(Box::new( + vec.iter().last().map_or(ExpressionType::Scalar, Expression::r#type), + )), + Expression::Unique(expression) => match expression.r#type() { + ExpressionType::List(inner) => inner.as_ref().clone(), + _ => expression.r#type(), + }, + Expression::Required(expression) => expression.r#type(), + Expression::Join { parent, .. } => parent.r#type(), + Expression::MapField { records, .. } => records.r#type(), + } + } } impl std::fmt::Display for Expression { diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 3fddda418e95..a2f51b5aac81 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -2,8 +2,8 @@ use std::collections::HashSet; use itertools::Itertools; use query_structure::{ - ConditionValue, Filter, ModelProjection, PlaceholderType, PrismaValue, QueryMode, RelationField, ScalarCondition, - ScalarField, ScalarFilter, ScalarProjection, + ConditionListValue, ConditionValue, Filter, ModelProjection, PlaceholderType, PrismaValue, QueryMode, + RelationField, ScalarCondition, ScalarField, ScalarFilter, ScalarProjection, SelectedField, SelectionResult, }; use sql_query_connector::{ context::Context, model_extensions::AsColumns, query_arguments_ext::QueryArgumentsExt, query_builder, @@ -113,7 +113,7 @@ fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<' ReadQuery::RelatedRecordsQuery(rrq) => Some(rrq), _ => None, }) - .map(|rrq| -> TranslateResult { + .map(|mut rrq| -> TranslateResult { let parent_field_name = 
rrq.parent_field.name().to_owned(); let parent_fields = rrq.parent_field.linking_fields(); let child_fields = rrq.parent_field.related_field().linking_fields(); @@ -124,14 +124,53 @@ fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<' .map(|(left, right)| (left.name().to_owned(), right.name().to_owned())) .collect_vec(); - // nested.add_filter(Filter::Scalar(ScalarFilter { - // mode: QueryMode::Default, - // condition: ScalarCondition::Equals(ConditionValue::value(PrismaValue::placeholder( - // "parent_id".into(), - // PlaceholderType::String, - // ))), - // projection: ScalarProjection::Compound(referenced_fields), - // })); + // let linking_placeholders = parent_fields + // .scalars() + // .map(|sf| { + // ( + // sf.clone(), + // PrismaValue::placeholder( + // format!("@parent${}", sf.name()), + // sf.type_identifier().to_placeholder_type(), + // ), + // ) + // }) + // .collect::>(); + // + // // If constant values were already provided for some of the fields, merge the + // // placeholders for the missing fields. Otherwise, assign new `parent_results`. 
+ // if let Some(parent_results) = &mut rrq.parent_results { + // for result in parent_results { + // for (sf, value) in &linking_placeholders { + // let field = SelectedField::from(sf.clone()); + // if result.get(&field).is_none() { + // result.add((field, value.clone())); + // } + // } + // } + // } else { + // rrq.parent_results = Some(vec![SelectionResult::new(linking_placeholders)]); + // } + + for (parent_field, child_field) in parent_fields.scalars().zip(child_fields.scalars()) { + let placeholder = PrismaValue::placeholder( + format!("@parent${}", parent_field.name()), + parent_field.type_identifier().to_placeholder_type(), + ); + + let condition = if parent.r#type().is_list() { + ScalarCondition::In(ConditionListValue::list(vec![placeholder])) + } else { + ScalarCondition::Equals(ConditionValue::value(placeholder)) + }; + + rrq.add_filter(Filter::Scalar(ScalarFilter { + condition, + projection: ScalarProjection::Single(child_field.clone()), + mode: QueryMode::Default, + })); + } + let child_query = translate_read_query(ReadQuery::RelatedRecordsQuery(rrq), ctx)?; Ok(JoinExpression { diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index e3eca8c88ee5..2326183b7b54 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -250,3 +250,13 @@ impl FilteredQuery for ManyRecordsQuery { self.args.filter = Some(filter) } } + +impl FilteredQuery for RelatedRecordsQuery { + fn get_filter(&mut self) -> Option<&mut Filter> { + self.args.filter.as_mut() + } + + fn set_filter(&mut self, filter: Filter) { + self.args.filter = Some(filter) + } +} diff --git a/query-engine/query-structure/src/field/mod.rs b/query-engine/query-structure/src/field/mod.rs index d8faf404e662..93b9ccd98d80 100644 --- a/query-engine/query-structure/src/field/mod.rs +++ b/query-engine/query-structure/src/field/mod.rs @@ -3,6 +3,7 @@ mod relation; mod scalar; pub use composite::*; +use 
prisma_value::PlaceholderType; pub use relation::*; pub use scalar::*; @@ -179,6 +180,23 @@ impl TypeIdentifier { } } + pub fn to_placeholder_type(&self) -> PlaceholderType { + match self { + TypeIdentifier::String => PlaceholderType::String, + TypeIdentifier::Int => PlaceholderType::Int, + TypeIdentifier::BigInt => PlaceholderType::BigInt, + TypeIdentifier::Float => PlaceholderType::Float, + TypeIdentifier::Decimal => PlaceholderType::Decimal, + TypeIdentifier::Boolean => PlaceholderType::Boolean, + TypeIdentifier::Enum(_) => PlaceholderType::String, + TypeIdentifier::UUID => PlaceholderType::String, + TypeIdentifier::Json => PlaceholderType::Object, + TypeIdentifier::DateTime => PlaceholderType::Date, + TypeIdentifier::Bytes => PlaceholderType::Bytes, + TypeIdentifier::Unsupported => PlaceholderType::Any, + } + } + /// Returns `true` if the type identifier is [`Enum`]. pub fn is_enum(&self) -> bool { matches!(self, Self::Enum(..)) From 6836d42cc929a7da6300b319df45081f09907160 Mon Sep 17 00:00:00 2001 From: jacek-prisma Date: Mon, 13 Jan 2025 13:51:37 +0000 Subject: [PATCH 45/65] fix: fix createOne (#5121) --- .../schema.prisma | 26 +++++++++---------- .../src/compiler/translate/query/write.rs | 2 +- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma index 4ec44dc9b33f..35fb65826c70 100644 --- a/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma +++ b/prisma-fmt/tests/code_actions/scenarios/create_missing_block_composite_type_crlf/schema.prisma @@ -1,13 +1,13 @@ -generator client { - provider = "prisma-client-js" -} - -datasource db { - provider = "mongodb" - url = env("DATABASE_URL") -} - -type Kattbjorn { - name String - friend Animal -} +generator client { + provider = "prisma-client-js" +} + +datasource db { + 
provider = "mongodb" + url = env("DATABASE_URL") +} + +type Kattbjorn { + name String + friend Animal +} diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-engine/core/src/compiler/translate/query/write.rs index a3a39f2372da..b3ae28a7b76e 100644 --- a/query-engine/core/src/compiler/translate/query/write.rs +++ b/query-engine/core/src/compiler/translate/query/write.rs @@ -23,7 +23,7 @@ pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> Tra // TODO: we probably need some additional node type or extra info in the WriteQuery node // to help the client executor figure out the returned ID in the case when it's inferred // from the query arguments. - Expression::Execute(build_db_query(query)?) + Expression::Query(build_db_query(query)?) } WriteQuery::CreateManyRecords(cmr) => { From 9ef2ff8bb6338a62b7ff7d0367ce21793a958c54 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 14 Jan 2025 14:02:50 +0100 Subject: [PATCH 46/65] Mark to-one nested reads as unique --- query-engine/core/src/compiler/translate/query/read.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index a2f51b5aac81..9de5840a11ab 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -217,6 +217,12 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> Transl let expr = Expression::Query(build_db_query(query)?); + let expr = if !rrq.parent_field.arity().is_list() { + Expression::Unique(Box::new(expr)) + } else { + expr + }; + let expr = if needs_reversed_order { Expression::Reverse(Box::new(expr)) } else { From 159c4a09672d04cc89469da6882e2b58a66e9e74 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 14 Jan 2025 14:20:41 +0100 Subject: [PATCH 47/65] Add `LIMIT 1` for to-one queries --- 
.../core/src/compiler/translate/query/read.rs | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 9de5840a11ab..744afb569494 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -203,6 +203,7 @@ fn build_read_m2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> Tran fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { let selected_fields = rrq.selected_fields.without_relations().into_virtuals_last(); let needs_reversed_order = rrq.args.needs_reversed_order(); + let to_one_relation = !rrq.parent_field.arity().is_list(); // TODO: we ignore chunking for now let query = query_builder::read::get_records( @@ -215,19 +216,17 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> Transl ctx, ); - let expr = Expression::Query(build_db_query(query)?); + let query = if to_one_relation { query.limit(1) } else { query }; - let expr = if !rrq.parent_field.arity().is_list() { - Expression::Unique(Box::new(expr)) - } else { - expr - }; + let mut expr = Expression::Query(build_db_query(query)?); - let expr = if needs_reversed_order { - Expression::Reverse(Box::new(expr)) - } else { - expr - }; + if to_one_relation { + expr = Expression::Unique(Box::new(expr)); + } + + if needs_reversed_order { + expr = Expression::Reverse(Box::new(expr)); + } if rrq.nested.is_empty() { Ok(expr) From 0e6a6944f1c84eb94e951a14f78048e4751228af Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 14 Jan 2025 15:47:09 +0100 Subject: [PATCH 48/65] Implement templates for IN --- quaint/src/ast.rs | 2 + quaint/src/ast/decorated.rs | 46 +++++++++++++++++++ quaint/src/ast/expression.rs | 2 + quaint/src/visitor.rs | 17 +++++++ .../mongodb-query-connector/src/filter.rs | 4 ++ 
.../sql-query-connector/src/filter/visitor.rs | 36 +++++++++++++++ .../core/src/compiler/translate/query/read.rs | 2 +- .../query-engine/examples/compiler.rs | 2 +- .../src/filter/scalar/condition/mod.rs | 6 +++ 9 files changed, 115 insertions(+), 2 deletions(-) create mode 100644 quaint/src/ast/decorated.rs diff --git a/quaint/src/ast.rs b/quaint/src/ast.rs index 50aa38cc4d1f..4ad9e9a07851 100644 --- a/quaint/src/ast.rs +++ b/quaint/src/ast.rs @@ -10,6 +10,7 @@ mod compare; mod conditions; mod conjunctive; mod cte; +mod decorated; mod delete; mod enums; mod expression; @@ -35,6 +36,7 @@ pub use compare::{Comparable, Compare, JsonCompare, JsonType}; pub use conditions::ConditionTree; pub use conjunctive::Conjunctive; pub use cte::{CommonTableExpression, IntoCommonTableExpression}; +pub use decorated::{Decoratable, Decorated}; pub use delete::Delete; pub use enums::{EnumName, EnumVariant}; pub use expression::*; diff --git a/quaint/src/ast/decorated.rs b/quaint/src/ast/decorated.rs new file mode 100644 index 000000000000..d88efec05f9e --- /dev/null +++ b/quaint/src/ast/decorated.rs @@ -0,0 +1,46 @@ +use std::borrow::Cow; + +use super::{Expression, ExpressionKind}; + +#[derive(Debug, Clone, PartialEq)] +pub struct Decorated<'a> { + pub(crate) expr: Box>, + pub(crate) prefix: Option>, + pub(crate) suffix: Option>, +} + +impl<'a> Decorated<'a> { + pub fn new(expr: Expression<'a>, prefix: Option, suffix: Option) -> Self + where + L: Into>, + R: Into>, + { + Decorated { + expr: Box::new(expr), + prefix: prefix.map(<_>::into), + suffix: suffix.map(<_>::into), + } + } +} + +expression!(Decorated, Decorated); + +pub trait Decoratable<'a> { + fn decorate(self, left: Option, right: Option) -> Decorated<'a> + where + L: Into>, + R: Into>; +} + +impl<'a, T> Decoratable<'a> for T +where + T: Into>, +{ + fn decorate(self, left: Option, right: Option) -> Decorated<'a> + where + L: Into>, + R: Into>, + { + Decorated::new(self.into(), left, right) + } +} diff --git 
a/quaint/src/ast/expression.rs b/quaint/src/ast/expression.rs index 7882ed0196df..c3515bfe781a 100644 --- a/quaint/src/ast/expression.rs +++ b/quaint/src/ast/expression.rs @@ -219,6 +219,8 @@ pub enum ExpressionKind<'a> { Value(Box>), /// DEFAULT keyword, e.g. for `INSERT INTO ... VALUES (..., DEFAULT, ...)` Default, + /// An expression wrapped with comments on each side + Decorated(Decorated<'a>), } impl ExpressionKind<'_> { diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs index badd9a86a2fc..00f6d9a03fdf 100644 --- a/quaint/src/visitor.rs +++ b/quaint/src/visitor.rs @@ -617,6 +617,7 @@ pub trait Visitor<'a> { None => self.write("*")?, }, ExpressionKind::Default => self.write("DEFAULT")?, + ExpressionKind::Decorated(decorated) => self.visit_decorated(decorated)?, } if let Some(alias) = value.alias { @@ -1207,4 +1208,20 @@ pub trait Visitor<'a> { fn visit_comment(&mut self, comment: Cow<'a, str>) -> Result { self.surround_with("/* ", " */", |ref mut s| s.write(comment)) } + + fn visit_decorated(&mut self, decorated: Decorated<'a>) -> Result { + let Decorated { prefix, suffix, expr } = decorated; + + if let Some(prefix) = prefix { + self.visit_comment(prefix)?; + } + + self.visit_expression(*expr)?; + + if let Some(suffix) = suffix { + self.visit_comment(suffix)?; + } + + Ok(()) + } } diff --git a/query-engine/connectors/mongodb-query-connector/src/filter.rs b/query-engine/connectors/mongodb-query-connector/src/filter.rs index b29fe6f97344..4dfa233acc62 100644 --- a/query-engine/connectors/mongodb-query-connector/src/filter.rs +++ b/query-engine/connectors/mongodb-query-connector/src/filter.rs @@ -246,6 +246,8 @@ impl MongoFilterVisitor { doc! 
{ "$not": { "$in": [&field_name, coerce_as_array(self.prefixed_field_ref(&field_ref)?)] } } } }, + ScalarCondition::InTemplate(_) => unimplemented!("query compiler not supported with mongodb yet"), + ScalarCondition::NotInTemplate(_) => unimplemented!("query compiler not supported with mongodb yet"), ScalarCondition::JsonCompare(jc) => match *jc.condition { ScalarCondition::Equals(value) => { let bson = match value { @@ -400,6 +402,8 @@ impl MongoFilterVisitor { true, )), }, + ScalarCondition::InTemplate(_) => unimplemented!("query compiler not supported with mongodb yet"), + ScalarCondition::NotInTemplate(_) => unimplemented!("query compiler not supported with mongodb yet"), ScalarCondition::IsSet(is_set) => Ok(render_is_set(&field_name, is_set)), ScalarCondition::JsonCompare(_) => Err(MongoError::Unsupported( "JSON filtering is not yet supported on MongoDB".to_string(), diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 152a98f6e783..f98404e33121 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -957,6 +957,22 @@ fn default_scalar_filter( // This code path is only reachable for connectors with `ScalarLists` capability comparable.not_equals(Expression::from(field_ref.aliased_col(alias, ctx)).all()) } + ScalarCondition::InTemplate(ConditionValue::Value(value)) => { + let sql_value = convert_first_value(fields, value, alias, ctx); + comparable.in_selection(sql_value.decorate( + Some("prisma-comma-repeatable-start"), + Some("prisma-comma-repeatable-end"), + )) + } + ScalarCondition::InTemplate(ConditionValue::FieldRef(_)) => todo!(), + ScalarCondition::NotInTemplate(ConditionValue::Value(value)) => { + let sql_value = convert_first_value(fields, value, alias, ctx); + comparable.not_in_selection(sql_value.decorate( + Some("prisma-comma-repeatable-start"), + 
Some("prisma-comma-repeatable-end"), + )) + } + ScalarCondition::NotInTemplate(ConditionValue::FieldRef(_)) => todo!(), + ScalarCondition::Search(value, _) => { reachable_only_with_capability!(ConnectorCapability::NativeFullTextSearch); let query: String = value @@ -1139,6 +1155,26 @@ fn insensitive_scalar_filter( // This code path is only reachable for connectors with `ScalarLists` capability comparable.compare_raw("NOT ILIKE", Expression::from(field_ref.aliased_col(alias, ctx)).all()) } + ScalarCondition::InTemplate(ConditionValue::Value(value)) => { + let comparable = Expression::from(lower(comparable)); + let sql_value = convert_first_value(fields, value, alias, ctx); + + comparable.in_selection(sql_value.decorate( + Some("prisma-comma-repeatable-start"), + Some("prisma-comma-repeatable-end"), + )) + } + ScalarCondition::InTemplate(ConditionValue::FieldRef(_)) => todo!(), + ScalarCondition::NotInTemplate(ConditionValue::Value(value)) => { + let comparable = Expression::from(lower(comparable)); + let sql_value = convert_first_value(fields, value, alias, ctx); + + comparable.not_in_selection(sql_value.decorate( + Some("prisma-comma-repeatable-start"), + Some("prisma-comma-repeatable-end"), + )) + } + ScalarCondition::NotInTemplate(ConditionValue::FieldRef(_)) => todo!(), + ScalarCondition::Search(value, _) => { reachable_only_with_capability!(ConnectorCapability::NativeFullTextSearch); let query: String = value diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 744afb569494..ff3cc4fb9c16 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -159,7 +159,7 @@ fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<' ); let condition = if parent.r#type().is_list() { - ScalarCondition::In(ConditionListValue::list(vec![placeholder])) + ScalarCondition::InTemplate(ConditionValue::value(placeholder)) } else { 
ScalarCondition::Equals(ConditionValue::value(placeholder)) }; diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 6292910ea3d7..4555eeb9ab1e 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -27,7 +27,7 @@ pub fn main() -> anyhow::Result<()> { // }) let query: JsonSingleQuery = serde_json::from_value(json!({ "modelName": "User", - "action": "findUnique", + "action": "findMany", "query": { "arguments": { "where": { diff --git a/query-engine/query-structure/src/filter/scalar/condition/mod.rs b/query-engine/query-structure/src/filter/scalar/condition/mod.rs index ff32d3d52219..f60fa74ee986 100644 --- a/query-engine/query-structure/src/filter/scalar/condition/mod.rs +++ b/query-engine/query-structure/src/filter/scalar/condition/mod.rs @@ -21,6 +21,8 @@ pub enum ScalarCondition { GreaterThanOrEquals(ConditionValue), In(ConditionListValue), NotIn(ConditionListValue), + InTemplate(ConditionValue), + NotInTemplate(ConditionValue), JsonCompare(JsonCondition), Search(ConditionValue, Vec), NotSearch(ConditionValue, Vec), @@ -52,6 +54,8 @@ impl ScalarCondition { Self::GreaterThanOrEquals(v) => Self::LessThan(v), Self::In(v) => Self::NotIn(v), Self::NotIn(v) => Self::In(v), + Self::InTemplate(v) => Self::NotInTemplate(v), + Self::NotInTemplate(v) => Self::InTemplate(v), Self::JsonCompare(json_compare) => { let inverted_cond = json_compare.condition.invert(true); @@ -86,6 +90,8 @@ impl ScalarCondition { ScalarCondition::GreaterThanOrEquals(v) => v.as_field_ref(), ScalarCondition::In(v) => v.as_field_ref(), ScalarCondition::NotIn(v) => v.as_field_ref(), + ScalarCondition::InTemplate(v) => v.as_field_ref(), + ScalarCondition::NotInTemplate(v) => v.as_field_ref(), ScalarCondition::JsonCompare(json_cond) => json_cond.condition.as_field_ref(), ScalarCondition::Search(v, _) => v.as_field_ref(), ScalarCondition::NotSearch(v, _) => v.as_field_ref(), From 
361de2099dc264d63c2e8f57a2befd6d52cc929e Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 14 Jan 2025 15:51:53 +0100 Subject: [PATCH 49/65] Remove unused stuff --- .../core/src/compiler/translate/query/read.rs | 31 +------------------ 1 file changed, 1 insertion(+), 30 deletions(-) diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index ff3cc4fb9c16..785d6d13c0ff 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -2,8 +2,7 @@ use std::collections::HashSet; use itertools::Itertools; use query_structure::{ - ConditionListValue, ConditionValue, Filter, ModelProjection, PlaceholderType, PrismaValue, QueryMode, - RelationField, ScalarCondition, ScalarField, ScalarFilter, ScalarProjection, SelectedField, SelectionResult, + ConditionValue, Filter, ModelProjection, PrismaValue, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, }; use sql_query_connector::{ context::Context, model_extensions::AsColumns, query_arguments_ext::QueryArgumentsExt, query_builder, @@ -124,34 +123,6 @@ fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<' .map(|(left, right)| (left.name().to_owned(), right.name().to_owned())) .collect_vec(); - // let linking_placeholders = parent_fields - // .scalars() - // .map(|sf| { - // ( - // sf.clone(), - // PrismaValue::placeholder( - // format!("@parent${}", sf.name()), - // sf.type_identifier().to_placeholder_type(), - // ), - // ) - // }) - // .collect::>(); - // - // // If constant values were already provided for some of the fields, merge the - // // placeholders for the missing fields. Otherwise, assign new `parent_results`. 
- // if let Some(parent_results) = &mut rrq.parent_results { - // for result in parent_results { - // for (sf, value) in &linking_placeholders { - // let field = SelectedField::from(sf.clone()); - // if result.get(&field).is_none() { - // result.add((field, value.clone())); - // } - // } - // } - // } else { - // rrq.parent_results = Some(vec![SelectionResult::new(linking_placeholders)]); - // } - for (parent_field, child_field) in parent_fields.scalars().zip(child_fields.scalars()) { let placeholder = PrismaValue::placeholder( format!("@parent${}", parent_field.name()), From 244e52cf845620ed9be5db5f520c1db32784e03f Mon Sep 17 00:00:00 2001 From: jacek-prisma Date: Wed, 15 Jan 2025 09:40:21 +0000 Subject: [PATCH 50/65] Extract sql query builder (#5123) --- Cargo.lock | 27 ++++ Cargo.toml | 1 + .../src/interface/connection.rs | 28 ++-- .../src/interface/transaction.rs | 24 ++-- .../src/output_meta.rs | 4 +- .../src/query_builder/group_by_builder.rs | 3 +- .../src/query_builder/read_query_builder.rs | 5 +- .../src/root_queries/aggregate.rs | 2 +- .../src/root_queries/update/expression.rs | 2 +- .../src/root_queries/update/into_operation.rs | 3 +- .../src/root_queries/update/mod.rs | 2 +- .../src/root_queries/update/operation.rs | 3 +- .../query-connector/src/interface.rs | 123 +----------------- .../connectors/query-connector/src/lib.rs | 2 - .../connectors/query-connector/src/upsert.rs | 3 +- .../connectors/sql-query-connector/Cargo.toml | 5 +- .../src/database/connection.rs | 11 +- .../src/database/operations/read.rs | 19 +-- .../src/database/operations/read/process.rs | 3 +- .../src/database/operations/update.rs | 19 +-- .../src/database/operations/upsert.rs | 14 +- .../src/database/operations/write.rs | 48 +++++-- .../src/database/transaction.rs | 13 +- .../connectors/sql-query-connector/src/lib.rs | 14 +- .../src/model_extensions/mod.rs | 9 -- .../src/model_extensions/record.rs | 34 ----- .../sql-query-connector/src/query_ext.rs | 50 +++++-- 
.../connectors/sql-query-connector/src/row.rs | 7 +- query-engine/core/Cargo.toml | 2 + query-engine/core/src/compiler/expression.rs | 14 +- .../core/src/compiler/expression/format.rs | 24 ++-- .../core/src/compiler/translate/query.rs | 8 +- .../core/src/compiler/translate/query/read.rs | 19 ++- .../src/compiler/translate/query/write.rs | 10 +- .../interpreter/query_interpreters/write.rs | 4 +- query-engine/core/src/query_ast/read.rs | 3 +- query-engine/core/src/query_ast/write.rs | 4 +- .../read/aggregations/mod.rs | 3 +- .../src/query_graph_builder/write/create.rs | 3 +- .../write/nested/delete_nested.rs | 3 +- .../src/query_graph_builder/write/utils.rs | 6 +- .../write/write_args_parser.rs | 6 +- .../query-builders/query-builder/Cargo.toml | 9 ++ .../query-builders/query-builder/src/lib.rs | 14 ++ .../sql-query-builder/Cargo.toml | 19 +++ .../sql-query-builder}/src/column_metadata.rs | 6 +- .../sql-query-builder}/src/context.rs | 12 ++ .../src/cursor_condition.rs | 0 .../sql-query-builder}/src/filter/alias.rs | 4 +- .../sql-query-builder}/src/filter/mod.rs | 8 +- .../sql-query-builder}/src/filter/visitor.rs | 0 .../sql-query-builder}/src/join_utils.rs | 0 .../sql-query-builder/src/lib.rs} | 24 +++- .../sql-query-builder/src/limit.rs | 31 +++++ .../src/model_extensions/column.rs | 0 .../src/model_extensions/mod.rs | 8 ++ .../src/model_extensions/relation.rs | 2 +- .../src/model_extensions/scalar_field.rs | 39 +----- .../src/model_extensions/selection_result.rs | 2 +- .../src/model_extensions/table.rs | 2 +- .../src/nested_aggregations.rs | 0 .../sql-query-builder}/src/ordering.rs | 0 .../src/query_arguments_ext.rs | 0 .../sql-query-builder/src}/read.rs | 19 ++- .../sql-query-builder/src}/select/lateral.rs | 0 .../sql-query-builder/src}/select/mod.rs | 2 +- .../sql-query-builder/src}/select/subquery.rs | 0 .../sql-query-builder}/src/sql_trace.rs | 0 .../sql-query-builder/src}/write.rs | 27 ++-- query-engine/query-structure/Cargo.toml | 1 + 
.../src/aggregate_selection.rs | 70 ++++++++++ .../query-structure/src/filter/mod.rs | 2 + .../query-structure/src/filter/record.rs | 55 ++++++++ query-engine/query-structure/src/lib.rs | 4 + .../src/write_args.rs | 14 +- 75 files changed, 524 insertions(+), 437 deletions(-) delete mode 100644 query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs delete mode 100644 query-engine/connectors/sql-query-connector/src/model_extensions/record.rs create mode 100644 query-engine/query-builders/query-builder/Cargo.toml create mode 100644 query-engine/query-builders/query-builder/src/lib.rs create mode 100644 query-engine/query-builders/sql-query-builder/Cargo.toml rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/column_metadata.rs (84%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/context.rs (78%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/cursor_condition.rs (100%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/filter/alias.rs (95%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/filter/mod.rs (79%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/filter/visitor.rs (100%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/join_utils.rs (100%) rename query-engine/{connectors/sql-query-connector/src/query_builder/mod.rs => query-builders/sql-query-builder/src/lib.rs} (65%) create mode 100644 query-engine/query-builders/sql-query-builder/src/limit.rs rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/model_extensions/column.rs (100%) create mode 100644 query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs rename query-engine/{connectors/sql-query-connector => 
query-builders/sql-query-builder}/src/model_extensions/relation.rs (99%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/model_extensions/scalar_field.rs (78%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/model_extensions/selection_result.rs (97%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/model_extensions/table.rs (98%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/nested_aggregations.rs (100%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/ordering.rs (100%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/query_arguments_ext.rs (100%) rename query-engine/{connectors/sql-query-connector/src/query_builder => query-builders/sql-query-builder/src}/read.rs (97%) rename query-engine/{connectors/sql-query-connector/src/query_builder => query-builders/sql-query-builder/src}/select/lateral.rs (100%) rename query-engine/{connectors/sql-query-connector/src/query_builder => query-builders/sql-query-builder/src}/select/mod.rs (99%) rename query-engine/{connectors/sql-query-connector/src/query_builder => query-builders/sql-query-builder/src}/select/subquery.rs (100%) rename query-engine/{connectors/sql-query-connector => query-builders/sql-query-builder}/src/sql_trace.rs (100%) rename query-engine/{connectors/sql-query-connector/src/query_builder => query-builders/sql-query-builder/src}/write.rs (94%) create mode 100644 query-engine/query-structure/src/aggregate_selection.rs create mode 100644 query-engine/query-structure/src/filter/record.rs rename query-engine/{connectors/query-connector => query-structure}/src/write_args.rs (98%) diff --git a/Cargo.lock b/Cargo.lock index cfd52bd42c57..ec9b4ae466b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3742,6 +3742,14 @@ dependencies = [ "winapi", ] 
+[[package]] +name = "query-builder" +version = "0.1.0" +dependencies = [ + "query-structure", + "serde", +] + [[package]] name = "query-connector" version = "0.1.0" @@ -3785,11 +3793,13 @@ dependencies = [ "prisma-metrics", "psl", "quaint", + "query-builder", "query-connector", "query-structure", "schema", "serde", "serde_json", + "sql-query-builder", "sql-query-connector", "telemetry", "thiserror", @@ -4004,6 +4014,7 @@ dependencies = [ "chrono", "cuid", "getrandom 0.2.11", + "indexmap 2.2.2", "itertools 0.12.0", "nanoid", "prisma-value", @@ -5118,6 +5129,21 @@ dependencies = [ "user-facing-errors", ] +[[package]] +name = "sql-query-builder" +version = "0.1.0" +dependencies = [ + "chrono", + "itertools 0.12.0", + "prisma-value", + "psl", + "quaint", + "query-builder", + "query-structure", + "serde_json", + "telemetry", +] + [[package]] name = "sql-query-connector" version = "0.1.0" @@ -5139,6 +5165,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", + "sql-query-builder", "telemetry", "thiserror", "tokio", diff --git a/Cargo.toml b/Cargo.toml index b78f4801fd56..4658ac4617c9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ members = [ "query-engine/query-engine-c-abi", "query-engine/request-handlers", "query-engine/schema", + "query-engine/query-builders/*", "libs/*", "prisma-fmt", "prisma-schema-wasm", diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs index 94f29b21535e..fc73a97a3066 100644 --- a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs @@ -5,9 +5,7 @@ use crate::{ MongoDbTransaction, }; use async_trait::async_trait; -use connector_interface::{ - Connection, ConnectionLike, ReadOperations, Transaction, UpdateType, WriteArgs, WriteOperations, -}; +use connector_interface::{Connection, ConnectionLike, ReadOperations, 
Transaction, UpdateType, WriteOperations}; use mongodb::{ClientSession, Database}; use query_structure::{prelude::*, RelationLoadStrategy, SelectionResult}; use std::collections::HashMap; @@ -55,7 +53,7 @@ impl WriteOperations for MongoDbConnection { async fn create_record( &mut self, model: &Model, - args: WriteArgs, + args: query_structure::WriteArgs, // The field selection on a create is never used on MongoDB as it cannot return more than the ID. _selected_fields: FieldSelection, _traceparent: Option, @@ -66,7 +64,7 @@ impl WriteOperations for MongoDbConnection { async fn create_records( &mut self, model: &Model, - args: Vec, + args: Vec, skip_duplicates: bool, _traceparent: Option, ) -> connector_interface::Result { @@ -83,7 +81,7 @@ impl WriteOperations for MongoDbConnection { async fn create_records_returning( &mut self, _model: &Model, - _args: Vec, + _args: Vec, _skip_duplicates: bool, _selected_fields: FieldSelection, _traceparent: Option, @@ -94,8 +92,8 @@ impl WriteOperations for MongoDbConnection { async fn update_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -118,8 +116,8 @@ impl WriteOperations for MongoDbConnection { async fn update_records_returning( &mut self, _model: &Model, - _record_filter: connector_interface::RecordFilter, - _args: WriteArgs, + _record_filter: query_structure::RecordFilter, + _args: query_structure::WriteArgs, _selected_fields: FieldSelection, _limit: Option, _traceparent: Option, @@ -130,8 +128,8 @@ impl WriteOperations for MongoDbConnection { async fn update_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, selected_fields: Option, _traceparent: Option, ) -> 
connector_interface::Result> { @@ -163,7 +161,7 @@ impl WriteOperations for MongoDbConnection { async fn delete_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -180,7 +178,7 @@ impl WriteOperations for MongoDbConnection { async fn delete_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, selected_fields: FieldSelection, _traceparent: Option, ) -> connector_interface::Result { @@ -314,7 +312,7 @@ impl ReadOperations for MongoDbConnection { &mut self, model: &Model, query_arguments: query_structure::QueryArguments, - selections: Vec, + selections: Vec, group_by: Vec, having: Option, _traceparent: Option, diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs index 31943e0dd6cc..9f7b886d82da 100644 --- a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs @@ -80,7 +80,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn create_record( &mut self, model: &Model, - args: connector_interface::WriteArgs, + args: query_structure::WriteArgs, // The field selection on a create is never used on MongoDB as it cannot return more than the ID. 
_selected_fields: FieldSelection, _traceparent: Option, @@ -97,7 +97,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn create_records( &mut self, model: &Model, - args: Vec, + args: Vec, skip_duplicates: bool, _traceparent: Option, ) -> connector_interface::Result { @@ -114,7 +114,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn create_records_returning( &mut self, _model: &Model, - _args: Vec, + _args: Vec, _skip_duplicates: bool, _selected_fields: FieldSelection, _traceparent: Option, @@ -125,8 +125,8 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn update_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: connector_interface::WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -148,8 +148,8 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn update_records_returning( &mut self, _model: &Model, - _record_filter: connector_interface::RecordFilter, - _args: connector_interface::WriteArgs, + _record_filter: query_structure::RecordFilter, + _args: query_structure::WriteArgs, _selected_fields: FieldSelection, _limit: Option, _traceparent: Option, @@ -160,8 +160,8 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn update_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, - args: connector_interface::WriteArgs, + record_filter: query_structure::RecordFilter, + args: query_structure::WriteArgs, selected_fields: Option, _traceparent: Option, ) -> connector_interface::Result> { @@ -192,7 +192,7 @@ impl WriteOperations for MongoDbTransaction<'_> { async fn delete_records( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, limit: Option, _traceparent: Option, ) -> connector_interface::Result { @@ -209,7 +209,7 @@ impl WriteOperations for 
MongoDbTransaction<'_> { async fn delete_record( &mut self, model: &Model, - record_filter: connector_interface::RecordFilter, + record_filter: query_structure::RecordFilter, selected_fields: FieldSelection, _traceparent: Option, ) -> connector_interface::Result { @@ -348,7 +348,7 @@ impl ReadOperations for MongoDbTransaction<'_> { &mut self, model: &Model, query_arguments: query_structure::QueryArguments, - selections: Vec, + selections: Vec, group_by: Vec, having: Option, _traceparent: Option, diff --git a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs index d1937bf7fee7..1a38448ff584 100644 --- a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs +++ b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs @@ -1,7 +1,7 @@ -use connector_interface::AggregationSelection; use indexmap::IndexMap; use query_structure::{ - ast::FieldArity, DefaultKind, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField, TypeIdentifier, + ast::FieldArity, AggregationSelection, DefaultKind, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField, + TypeIdentifier, }; /// Maps field db field names to their meta information. 
diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs index c40c3ee8d0dc..a58a3928ed30 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs @@ -1,8 +1,7 @@ use crate::constants::*; use bson::{doc, Bson, Document}; -use connector_interface::AggregationSelection; -use query_structure::{AggregationFilter, Filter, ScalarFieldRef}; +use query_structure::{AggregationFilter, AggregationSelection, Filter, ScalarFieldRef}; use std::collections::HashSet; /// Represents a `$group` aggregation stage. diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs index bfe48d5f851c..61e84de6fa6b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs @@ -11,10 +11,11 @@ use crate::{ vacuum_cursor, BsonTransform, IntoBson, }; use bson::{doc, Document}; -use connector_interface::AggregationSelection; use itertools::Itertools; use mongodb::{options::AggregateOptions, ClientSession, Collection}; -use query_structure::{FieldSelection, Filter, Model, QueryArguments, ScalarFieldRef, VirtualSelection}; +use query_structure::{ + AggregationSelection, FieldSelection, Filter, Model, QueryArguments, ScalarFieldRef, VirtualSelection, +}; use std::convert::TryFrom; use std::future::IntoFuture; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs index 797e34127f8a..02bc36481d51 100644 --- 
a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs @@ -2,7 +2,7 @@ use crate::{constants::*, output_meta, query_builder::MongoReadQueryBuilder, val use connector_interface::*; use mongodb::{bson::Document, ClientSession, Database}; -use query_structure::{prelude::*, Filter, QueryArguments}; +use query_structure::{prelude::*, AggregationSelection, Filter, QueryArguments}; pub async fn aggregate<'conn>( database: &Database, diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs index f1be40e57416..13cf72beba8f 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/expression.rs @@ -1,8 +1,8 @@ use super::{into_expression::IntoUpdateExpression, operation}; use bson::{doc, Bson, Document}; -use connector_interface::FieldPath; use indexmap::IndexMap; +use query_structure::FieldPath; /// `UpdateExpression` is an intermediary AST that's used to represent MongoDB expressions. /// It is meant to be transformed into `BSON`. 
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs index 52ffbd70e338..34c843afc06b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs @@ -2,8 +2,7 @@ use super::operation::*; use crate::*; use bson::doc; -use connector_interface::{CompositeWriteOperation, FieldPath, ScalarWriteOperation, WriteOperation}; -use query_structure::{Field, PrismaValue}; +use query_structure::{CompositeWriteOperation, Field, FieldPath, PrismaValue, ScalarWriteOperation, WriteOperation}; pub(crate) trait IntoUpdateOperation { fn into_update_operations(self, field: &Field, path: FieldPath) -> crate::Result>; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs index 481ce7a95d29..287d0c157d86 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/mod.rs @@ -8,9 +8,9 @@ use super::*; use crate::*; use bson::Document; -use connector_interface::{FieldPath, WriteOperation}; use into_expression::IntoUpdateExpressions; use into_operation::IntoUpdateOperation; +use query_structure::{FieldPath, WriteOperation}; pub(crate) trait IntoUpdateDocumentExtension { fn into_update_docs(self, field: &Field, path: FieldPath) -> crate::Result>; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs index 62502f03a4aa..f32e55a0d4f3 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs +++ 
b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs @@ -1,7 +1,6 @@ use super::{expression, into_expression::IntoUpdateExpression}; use bson::{doc, Document}; -use connector_interface::FieldPath; -use query_structure::Filter; +use query_structure::{FieldPath, Filter}; /// `UpdateOperation` is an intermediary AST used to perform preliminary transformations from a `WriteOperation`. /// It is meant to be transformed into an `UpdateExpression`. diff --git a/query-engine/connectors/query-connector/src/interface.rs b/query-engine/connectors/query-connector/src/interface.rs index 3bf1614e0394..3db5a11a3b5c 100644 --- a/query-engine/connectors/query-connector/src/interface.rs +++ b/query-engine/connectors/query-connector/src/interface.rs @@ -1,7 +1,7 @@ -use crate::{NativeUpsert, WriteArgs}; +use crate::NativeUpsert; use async_trait::async_trait; use prisma_value::PrismaValue; -use query_structure::{ast::FieldArity, *}; +use query_structure::*; use std::collections::HashMap; use telemetry::TraceParent; @@ -50,125 +50,6 @@ pub trait Transaction: ConnectionLike { /// transactions into something that can is capable of writing to or reading from the database. pub trait ConnectionLike: ReadOperations + WriteOperations + Send + Sync {} -/// A wrapper struct allowing to either filter for records or for the core to -/// communicate already known record selectors to connectors. -/// -/// Connector implementations should use known selectors to skip unnecessary fetch operations -/// if the query core already determined the selectors in a previous step. Simply put, -/// `selectors` should always have precendence over `filter`. 
-#[derive(Debug, Clone)] -pub struct RecordFilter { - pub filter: Filter, - pub selectors: Option>, -} - -impl RecordFilter { - pub fn empty() -> Self { - Self { - filter: Filter::empty(), - selectors: None, - } - } - - pub fn has_selectors(&self) -> bool { - self.selectors.is_some() - } -} - -impl From for RecordFilter { - fn from(filter: Filter) -> Self { - Self { - filter, - selectors: None, - } - } -} - -impl From> for RecordFilter { - fn from(selectors: Vec) -> Self { - Self { - filter: Filter::empty(), - selectors: Some(selectors), - } - } -} - -impl From for RecordFilter { - fn from(selector: SelectionResult) -> Self { - Self { - filter: Filter::empty(), - selectors: Some(vec![selector]), - } - } -} - -/// Selections for aggregation queries. -#[derive(Debug, Clone)] -pub enum AggregationSelection { - /// Single field selector. Only valid in the context of group by statements. - Field(ScalarFieldRef), - - /// Counts records of the model that match the query. - /// `all` indicates that an all-records selection has been made (e.g. SQL *). - /// `fields` are specific fields to count on. By convention, if `all` is true, - /// it will always be the last of the count results. - Count { all: bool, fields: Vec }, - - /// Compute average for each field contained. - Average(Vec), - - /// Compute sum for each field contained. - Sum(Vec), - - /// Compute mininum for each field contained. - Min(Vec), - - /// Compute maximum for each field contained. 
- Max(Vec), -} - -impl AggregationSelection { - /// Returns (field_db_name, TypeIdentifier, FieldArity) - pub fn identifiers(&self) -> Vec<(String, TypeIdentifier, FieldArity)> { - match self { - AggregationSelection::Field(field) => { - vec![(field.db_name().to_owned(), field.type_identifier(), field.arity())] - } - - AggregationSelection::Count { all, fields } => { - let mut mapped = Self::map_field_types(fields, Some(TypeIdentifier::Int)); - - if *all { - mapped.push(("all".to_owned(), TypeIdentifier::Int, FieldArity::Required)); - } - - mapped - } - - AggregationSelection::Average(fields) => Self::map_field_types(fields, Some(TypeIdentifier::Float)), - AggregationSelection::Sum(fields) => Self::map_field_types(fields, None), - AggregationSelection::Min(fields) => Self::map_field_types(fields, None), - AggregationSelection::Max(fields) => Self::map_field_types(fields, None), - } - } - - fn map_field_types( - fields: &[ScalarFieldRef], - fixed_type: Option, - ) -> Vec<(String, TypeIdentifier, FieldArity)> { - fields - .iter() - .map(|f| { - ( - f.db_name().to_owned(), - fixed_type.unwrap_or_else(|| f.type_identifier()), - FieldArity::Required, - ) - }) - .collect() - } -} - pub type AggregationRow = Vec; /// Result of an aggregation operation on a model or field. 
diff --git a/query-engine/connectors/query-connector/src/lib.rs b/query-engine/connectors/query-connector/src/lib.rs index c497f121ae9d..b810e70a8b22 100644 --- a/query-engine/connectors/query-connector/src/lib.rs +++ b/query-engine/connectors/query-connector/src/lib.rs @@ -5,12 +5,10 @@ pub mod error; mod coerce; mod interface; mod upsert; -mod write_args; pub use coerce::*; pub use interface::*; pub use upsert::*; -pub use write_args::*; pub type Result = std::result::Result; diff --git a/query-engine/connectors/query-connector/src/upsert.rs b/query-engine/connectors/query-connector/src/upsert.rs index 9455fbc30c49..6b1872db95a9 100644 --- a/query-engine/connectors/query-connector/src/upsert.rs +++ b/query-engine/connectors/query-connector/src/upsert.rs @@ -1,5 +1,4 @@ -use crate::{RecordFilter, WriteArgs}; -use query_structure::{FieldSelection, Filter, Model, ScalarFieldRef}; +use query_structure::{FieldSelection, Filter, Model, RecordFilter, ScalarFieldRef, WriteArgs}; #[derive(Debug, Clone)] pub struct NativeUpsert { diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 779ae6e50193..d53013d15832 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -29,7 +29,7 @@ all-native = [ # TODO: At the moment of writing (rustc 1.77.0), can_have_capability from psl does not eliminate joins # code from bundle for some reason, so we are doing it explicitly. 
Check with a newer version of compiler - if elimination # happens successfully, we don't need this feature anymore -relation_joins = [] +relation_joins = ["sql-query-builder/relation_joins"] # Enable Driver Adapters driver-adapters = [] @@ -62,6 +62,9 @@ path = "../query-connector" [dependencies.query-structure] path = "../../query-structure" +[dependencies.sql-query-builder] +path = "../../query-builders/sql-query-builder" + [dependencies.prisma-value] path = "../../../libs/prisma-value" diff --git a/query-engine/connectors/sql-query-connector/src/database/connection.rs b/query-engine/connectors/sql-query-connector/src/database/connection.rs index 614f174e562b..cc3489480f3c 100644 --- a/query-engine/connectors/sql-query-connector/src/database/connection.rs +++ b/query-engine/connectors/sql-query-connector/src/database/connection.rs @@ -1,19 +1,22 @@ #![cfg_attr(target_arch = "wasm32", allow(dead_code))] use super::{catch, transaction::SqlConnectorTransaction}; -use crate::{database::operations::*, Context, SqlError}; +use crate::{database::operations::*, SqlError}; use async_trait::async_trait; use connector::ConnectionLike; use connector_interface::{ - self as connector, AggregationRow, AggregationSelection, Connection, ReadOperations, RecordFilter, Transaction, - WriteArgs, WriteOperations, + self as connector, AggregationRow, Connection, ReadOperations, Transaction, WriteOperations, }; use prisma_value::PrismaValue; use quaint::{ connector::{IsolationLevel, TransactionCapable}, prelude::{ConnectionInfo, Queryable}, }; -use query_structure::{prelude::*, Filter, QueryArguments, RelationLoadStrategy, SelectionResult}; +use query_structure::{ + prelude::*, AggregationSelection, Filter, QueryArguments, RecordFilter, RelationLoadStrategy, SelectionResult, + WriteArgs, +}; +use sql_query_builder::Context; use std::{collections::HashMap, str::FromStr}; use telemetry::TraceParent; diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs 
b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 9eabee23ae72..9a9ac4469ce0 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -3,18 +3,13 @@ mod coerce; #[cfg(feature = "relation_joins")] mod process; -use crate::{ - column_metadata, - model_extensions::*, - query_arguments_ext::QueryArgumentsExt, - query_builder::{self, read}, - Context, QueryExt, Queryable, SqlError, -}; +use crate::{QueryExt, Queryable, SqlError}; use connector_interface::*; use futures::stream::{FuturesUnordered, StreamExt}; use quaint::ast::*; use query_structure::*; +use sql_query_builder::{column_metadata, read, AsColumns, AsTable, Context, QueryArgumentsExt, RelationFieldExt}; pub(crate) async fn get_single_record( conn: &dyn Queryable, @@ -53,7 +48,7 @@ async fn get_single_record_joins( &field_names, ); - let query = query_builder::select::SelectBuilder::build( + let query = sql_query_builder::select::SelectBuilder::build( QueryArguments::from((model.clone(), filter.clone())), &selected_fields, ctx, @@ -165,7 +160,7 @@ async fn get_many_records_joins( return Ok(records); }; - match ctx.max_bind_values { + match ctx.max_bind_values() { Some(chunk_size) if query_arguments.should_batch(chunk_size) => { return Err(SqlError::QueryParameterLimitExceeded( "Joined queries cannot be split into multiple queries.".to_string(), @@ -174,7 +169,7 @@ async fn get_many_records_joins( _ => (), }; - let query = query_builder::select::SelectBuilder::build(query_arguments.clone(), &selected_fields, ctx); + let query = sql_query_builder::select::SelectBuilder::build(query_arguments.clone(), &selected_fields, ctx); for item in conn.filter(query.into(), meta.as_slice(), ctx).await?.into_iter() { let mut record = Record::from(item); @@ -217,7 +212,7 @@ async fn get_many_records_wo_joins( // Todo: This can't work for all cases. 
Cursor-based pagination will not work, because it relies on the ordering // to determine the right queries to fire, and will default to incorrect orderings if no ordering is found. // The should_batch has been adjusted to reflect that as a band-aid, but deeper investigation is necessary. - match ctx.max_bind_values { + match ctx.max_bind_values() { Some(chunk_size) if query_arguments.should_batch(chunk_size) => { if query_arguments.has_unbatchable_ordering() { return Err(SqlError::QueryParameterLimitExceeded( @@ -312,7 +307,7 @@ pub(crate) async fn get_related_m2m_record_ids( // [DTODO] To verify: We might need chunked fetch here (too many parameters in the query). let select = Select::from_table(table) - .so_that(query_builder::in_conditions(&from_columns, from_record_ids, ctx)) + .so_that(sql_query_builder::in_conditions(&from_columns, from_record_ids, ctx)) .columns(from_columns.into_iter().chain(to_columns.into_iter())); let parent_model_id = from_field.model().primary_identifier(); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs index cfa03796ceaf..042dc2815b6a 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs @@ -2,8 +2,7 @@ use std::borrow::Cow; use itertools::{Either, Itertools}; use query_structure::{QueryArguments, Record}; - -use crate::query_arguments_ext::QueryArgumentsExt; +use sql_query_builder::QueryArgumentsExt; macro_rules! processor_state { ($name:ident $(-> $transition:ident($bound:ident))?) 
=> { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs index 9ea13127a4fc..3ba3327d02db 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs @@ -1,17 +1,12 @@ use super::read::get_single_record; -use crate::column_metadata::{self, ColumnMetadata}; -use crate::filter::FilterBuilder; -use crate::model_extensions::AsColumns; -use crate::query_builder::write::{build_update_and_set_query, chunk_update_with_ids}; use crate::row::ToSqlRow; -use crate::{Context, QueryExt, Queryable}; +use crate::{QueryExt, Queryable}; -use crate::limit::wrap_with_limit_subquery_if_needed; -use connector_interface::*; use itertools::Itertools; use quaint::ast::*; use query_structure::*; +use sql_query_builder::{column_metadata, limit, write, AsColumns, ColumnMetadata, Context, FilterBuilder}; /// Performs an update with an explicit selection set. /// This function is called for connectors that supports the `UpdateReturning` capability. 
@@ -34,7 +29,7 @@ pub(crate) async fn update_one_with_selection( let cond = FilterBuilder::without_top_level_joins().visit_filter(build_update_one_filter(record_filter), ctx); - let update = build_update_and_set_query(model, args, Some(&selected_fields), ctx).so_that(cond); + let update = write::build_update_and_set_query(model, args, Some(&selected_fields), ctx).so_that(cond); let field_names: Vec<_> = selected_fields.db_names().collect(); let idents = selected_fields.type_identifiers_with_arities(); @@ -107,8 +102,8 @@ pub(super) async fn update_many_from_filter( limit: Option, ctx: &Context<'_>, ) -> crate::Result> { - let update = build_update_and_set_query(model, args, None, ctx); - let filter_condition = wrap_with_limit_subquery_if_needed( + let update = write::build_update_and_set_query(model, args, None, ctx); + let filter_condition = limit::wrap_with_limit_subquery_if_needed( model, FilterBuilder::without_top_level_joins().visit_filter(record_filter.filter, ctx), limit, @@ -144,10 +139,10 @@ pub(super) async fn update_many_from_ids_and_filter( } let updates = { - let update = build_update_and_set_query(model, args, selected_fields, ctx); + let update = write::build_update_and_set_query(model, args, selected_fields, ctx); let ids: Vec<&SelectionResult> = ids.iter().take(limit.unwrap_or(usize::MAX)).collect(); - chunk_update_with_ids(update, model, &ids, filter_condition, ctx)? 
+ write::chunk_update_with_ids(update, model, &ids, filter_condition, ctx) }; Ok((updates, ids)) diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs index f086e4c60798..e95175e33b1a 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs @@ -1,14 +1,8 @@ -use crate::{ - column_metadata, - filter::FilterBuilder, - model_extensions::AsColumns, - query_builder::write::{build_update_and_set_query, create_record}, - row::ToSqlRow, - Context, Queryable, -}; +use crate::{row::ToSqlRow, Queryable}; use connector_interface::NativeUpsert; use quaint::prelude::{OnConflict, Query}; use query_structure::{ModelProjection, Record, SingleRecord}; +use sql_query_builder::{column_metadata, write, AsColumns, Context, FilterBuilder}; pub(crate) async fn native_upsert( conn: &dyn Queryable, @@ -23,9 +17,9 @@ pub(crate) async fn native_upsert( let where_condition = FilterBuilder::without_top_level_joins().visit_filter(upsert.filter().clone(), ctx); let update = - build_update_and_set_query(upsert.model(), upsert.update().clone(), None, ctx).so_that(where_condition); + write::build_update_and_set_query(upsert.model(), upsert.update().clone(), None, ctx).so_that(where_condition); - let insert = create_record(upsert.model(), upsert.create().clone(), &selected_fields, ctx); + let insert = write::create_record(upsert.model(), upsert.create().clone(), &selected_fields, ctx); let constraints: Vec<_> = upsert.unique_constraints().as_columns(ctx).collect(); let query: Query = insert.on_conflict(OnConflict::Update(update, constraints)).into(); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index 3df576c3fe03..07a385bab3ce 100644 --- 
a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -1,19 +1,16 @@ use super::update::*; -use crate::column_metadata; -use crate::filter::FilterBuilder; use crate::row::ToSqlRow; -use crate::{ - error::SqlError, model_extensions::*, query_builder::write, sql_trace::SqlTraceComment, Context, QueryExt, - Queryable, -}; -use connector_interface::*; +use crate::value::to_prisma_value; +use crate::{error::SqlError, QueryExt, Queryable}; use itertools::Itertools; use quaint::ast::{Insert, Query}; +use quaint::prelude::ResultSet; use quaint::{ error::ErrorKind, prelude::{native_uuid, uuid_to_bin, uuid_to_bin_swapped, Aliasable, Select, SqlFamily}, }; use query_structure::*; +use sql_query_builder::{column_metadata, write, Context, FilterBuilder, SelectionResultExt, SqlTraceComment}; use std::borrow::Cow; use std::{ collections::{HashMap, HashSet}, @@ -72,7 +69,7 @@ async fn generate_id( // db generate values only if needed if need_select { - let pk_select = id_select.add_traceparent(ctx.traceparent); + let pk_select = id_select.add_traceparent(ctx.traceparent()); let pk_result = conn.query(pk_select.into()).await?; let result = try_convert(&(id_field.into()), pk_result)?; @@ -287,7 +284,7 @@ pub(crate) async fn create_records_returning( /// Partitions data into batches, respecting `max_bind_values` and `max_insert_rows` settings from /// the `Context`. fn partition_into_batches(args: Vec, ctx: &Context<'_>) -> Vec> { - let batches = if let Some(max_params) = ctx.max_bind_values { + let batches = if let Some(max_params) = ctx.max_bind_values() { // We need to split inserts if they are above a parameter threshold, as well as split based on number of rows. // -> Horizontal partitioning by row number, vertical by number of args. 
args.into_iter() @@ -328,7 +325,7 @@ fn partition_into_batches(args: Vec, ctx: &Context<'_>) -> Vec) -> crate::Result { Ok(conn.raw_json(inputs).await?) } + +fn try_convert(model_projection: &ModelProjection, result_set: ResultSet) -> crate::Result { + let columns: Vec = result_set.columns().iter().map(|c| c.to_string()).collect(); + let mut record_projection = SelectionResult::default(); + + if let Some(row) = result_set.into_iter().next() { + for (i, val) in row.into_iter().enumerate() { + match model_projection.map_db_name(columns[i].as_str()) { + Some(field) => { + record_projection.add((field, to_prisma_value(val)?)); + } + None => { + return Err(SqlError::DomainError(DomainError::ScalarFieldNotFound { + name: columns[i].clone(), + container_type: "model", + container_name: String::from("unspecified"), + })) + } + } + } + } + + if model_projection.scalar_length() == record_projection.len() { + Ok(record_projection) + } else { + Err(SqlError::DomainError(DomainError::ConversionFailure( + "ResultSet".to_owned(), + "RecordProjection".to_owned(), + ))) + } +} diff --git a/query-engine/connectors/sql-query-connector/src/database/transaction.rs b/query-engine/connectors/sql-query-connector/src/database/transaction.rs index 6528343f54fa..9d43dfb25cbf 100644 --- a/query-engine/connectors/sql-query-connector/src/database/transaction.rs +++ b/query-engine/connectors/sql-query-connector/src/database/transaction.rs @@ -1,14 +1,15 @@ use super::catch; -use crate::{database::operations::*, Context, SqlError}; +use crate::{database::operations::*, SqlError}; use async_trait::async_trait; use connector::ConnectionLike; -use connector_interface::{ - self as connector, AggregationRow, AggregationSelection, ReadOperations, RecordFilter, Transaction, WriteArgs, - WriteOperations, -}; +use connector_interface::{self as connector, AggregationRow, ReadOperations, Transaction, WriteOperations}; use prisma_value::PrismaValue; use quaint::prelude::ConnectionInfo; -use 
query_structure::{prelude::*, Filter, QueryArguments, RelationLoadStrategy, SelectionResult}; +use query_structure::{ + prelude::*, AggregationSelection, Filter, QueryArguments, RecordFilter, RelationLoadStrategy, SelectionResult, + WriteArgs, +}; +use sql_query_builder::Context; use std::collections::HashMap; use telemetry::TraceParent; diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index dc809c7bf627..28ec5862e227 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -1,26 +1,14 @@ #![allow(clippy::wrong_self_convention)] #![deny(unsafe_code)] -mod column_metadata; -pub mod context; -mod cursor_condition; mod database; mod error; -mod filter; -mod join_utils; -mod limit; -pub mod model_extensions; -mod nested_aggregations; -mod ordering; -pub mod query_arguments_ext; -pub mod query_builder; mod query_ext; mod row; mod ser_raw; -mod sql_trace; mod value; -use self::{column_metadata::*, context::Context, query_ext::QueryExt, row::*}; +use self::{query_ext::QueryExt, row::*}; use quaint::prelude::Queryable; pub use database::operations::write::generate_insert_statements; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs deleted file mode 100644 index d1bff1954100..000000000000 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -mod column; -mod record; -mod relation; -mod scalar_field; -mod selection_result; -mod table; - -pub use self::{column::*, record::*, scalar_field::*}; -pub(crate) use self::{relation::*, selection_result::*, table::*}; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs deleted file mode 100644 index 
e764aa8e58ba..000000000000 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::{value::to_prisma_value, SqlError}; -use quaint::connector::ResultSet; -use query_structure::{DomainError, ModelProjection, SelectionResult}; - -pub fn try_convert(model_projection: &ModelProjection, result_set: ResultSet) -> crate::Result { - let columns: Vec = result_set.columns().iter().map(|c| c.to_string()).collect(); - let mut record_projection = SelectionResult::default(); - - if let Some(row) = result_set.into_iter().next() { - for (i, val) in row.into_iter().enumerate() { - match model_projection.map_db_name(columns[i].as_str()) { - Some(field) => { - record_projection.add((field, to_prisma_value(val)?)); - } - None => { - return Err(SqlError::DomainError(DomainError::ScalarFieldNotFound { - name: columns[i].clone(), - container_type: "model", - container_name: String::from("unspecified"), - })) - } - } - } - } - - if model_projection.scalar_length() == record_projection.len() { - Ok(record_projection) - } else { - Err(SqlError::DomainError(DomainError::ConversionFailure( - "ResultSet".to_owned(), - "RecordProjection".to_owned(), - ))) - } -} diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index dcf5b2f143b2..842ee100363c 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -1,15 +1,12 @@ -use crate::filter::FilterBuilder; use crate::ser_raw::SerializedResultSet; -use crate::{ - column_metadata, error::*, model_extensions::*, sql_trace::SqlTraceComment, ColumnMetadata, Context, SqlRow, - ToSqlRow, -}; +use crate::{error::*, SqlRow, ToSqlRow}; use async_trait::async_trait; -use connector_interface::RecordFilter; +use chrono::Utc; use futures::future::FutureExt; use itertools::Itertools; use quaint::{ast::*, connector::Queryable}; 
use query_structure::*; +use sql_query_builder::{column_metadata, AsColumns, AsTable, ColumnMetadata, Context, FilterBuilder, SqlTraceComment}; use std::{collections::HashMap, panic::AssertUnwindSafe}; use tracing::info_span; use tracing_futures::Instrument; @@ -25,7 +22,7 @@ impl QueryExt for Q { let span = info_span!("prisma:engine:filter_read_query"); let q = match q { - Query::Select(x) => Query::Select(Box::from(x.add_traceparent(ctx.traceparent))), + Query::Select(x) => Query::Select(Box::from(x.add_traceparent(ctx.traceparent()))), q => q, }; @@ -107,7 +104,7 @@ impl QueryExt for Q { let select = Select::from_table(model.as_table(ctx)) .columns(id_cols) - .add_traceparent(ctx.traceparent) + .add_traceparent(ctx.traceparent()) .so_that(condition); self.select_ids(select, model_id, ctx).await @@ -196,3 +193,40 @@ pub(crate) trait QueryExt { ctx: &Context<'_>, ) -> crate::Result>; } + +/// Attempts to convert a PrismaValue to a database value without any additional type information. +/// Can't reliably map Null values. +fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { + match pv { + PrismaValue::String(s) => s.into(), + PrismaValue::Float(f) => f.into(), + PrismaValue::Boolean(b) => b.into(), + PrismaValue::DateTime(d) => d.with_timezone(&Utc).into(), + PrismaValue::Enum(e) => e.into(), + PrismaValue::Int(i) => i.into(), + PrismaValue::BigInt(i) => i.into(), + PrismaValue::Uuid(u) => u.to_string().into(), + PrismaValue::List(l) => Value::array(l.into_iter().map(convert_lossy)), + PrismaValue::Json(s) => Value::json(serde_json::from_str(&s).unwrap()), + PrismaValue::Bytes(b) => Value::bytes(b), + PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. 
+ PrismaValue::Object(_) => unimplemented!(), + PrismaValue::Placeholder { name, r#type } => Value::var(name, convert_placeholder_type_to_var_type(&r#type)), + } +} + +fn convert_placeholder_type_to_var_type(pt: &PlaceholderType) -> VarType { + match pt { + PlaceholderType::Any => VarType::Unknown, + PlaceholderType::String => VarType::Text, + PlaceholderType::Int => VarType::Int32, + PlaceholderType::BigInt => VarType::Int64, + PlaceholderType::Float => VarType::Numeric, + PlaceholderType::Boolean => VarType::Boolean, + PlaceholderType::Decimal => VarType::Numeric, + PlaceholderType::Date => VarType::DateTime, + PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(t))), + PlaceholderType::Object => VarType::Json, + PlaceholderType::Bytes => VarType::Bytes, + } +} diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 59947bbf386b..bb18ae623a36 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -1,10 +1,11 @@ -use crate::{column_metadata::ColumnMetadata, error::SqlError, value::to_prisma_value}; +use crate::{error::SqlError, value::to_prisma_value}; use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; -use connector_interface::{coerce_null_to_zero_value, AggregationResult, AggregationSelection}; +use connector_interface::{coerce_null_to_zero_value, AggregationResult}; use core::{f32, f64}; use quaint::{connector::ResultRow, Value, ValueType}; -use query_structure::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; +use query_structure::{AggregationSelection, ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; +use sql_query_builder::ColumnMetadata; use std::{io, str::FromStr}; use uuid::Uuid; diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index 
a6aa37e1f63a..cd41c4ccf840 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -20,6 +20,8 @@ indexmap.workspace = true itertools.workspace = true once_cell = "1" petgraph = "0.4" +query-builder = { path = "../query-builders/query-builder" } +sql-query-builder = { path = "../query-builders/sql-query-builder" } query-structure = { path = "../query-structure", features = [ "default_generators", ] } diff --git a/query-engine/core/src/compiler/expression.rs b/query-engine/core/src/compiler/expression.rs index 1a3a3606bb68..4ba6f22a46b0 100644 --- a/query-engine/core/src/compiler/expression.rs +++ b/query-engine/core/src/compiler/expression.rs @@ -1,4 +1,4 @@ -use query_structure::PrismaValue; +use query_builder::DbQuery; use serde::Serialize; mod format; @@ -21,18 +21,6 @@ impl std::fmt::Display for Binding { } } -#[derive(Debug, Serialize)] -pub struct DbQuery { - pub query: String, - pub params: Vec, -} - -impl DbQuery { - pub fn new(query: String, params: Vec) -> Self { - Self { query, params } - } -} - #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct JoinExpression { diff --git a/query-engine/core/src/compiler/expression/format.rs b/query-engine/core/src/compiler/expression/format.rs index 8c61747356df..cd2128c3a0b0 100644 --- a/query-engine/core/src/compiler/expression/format.rs +++ b/query-engine/core/src/compiler/expression/format.rs @@ -41,19 +41,19 @@ where pub fn expression(&'a self, expression: &'a Expression) -> DocBuilder<'a, PrettyPrinter<'a, D>, ColorSpec> { match expression { - Expression::Seq(vec) => self.seq(&vec), - Expression::Get { name } => self.get(&name), - Expression::Let { bindings, expr } => self.r#let(&bindings, &expr), - Expression::GetFirstNonEmpty { names } => self.get_first_non_empty(&names), - Expression::Query(db_query) => self.query("query", &db_query), - Expression::Execute(db_query) => self.query("execute", &db_query), - Expression::Reverse(expression) => 
self.unary_function("reverse", &expression), - Expression::Sum(vec) => self.function("sum", &vec), - Expression::Concat(vec) => self.function("concat", &vec), + Expression::Seq(vec) => self.seq(vec), + Expression::Get { name } => self.get(name), + Expression::Let { bindings, expr } => self.r#let(bindings, expr), + Expression::GetFirstNonEmpty { names } => self.get_first_non_empty(names), + Expression::Query(db_query) => self.query("query", db_query), + Expression::Execute(db_query) => self.query("execute", db_query), + Expression::Reverse(expression) => self.unary_function("reverse", expression), + Expression::Sum(vec) => self.function("sum", vec), + Expression::Concat(vec) => self.function("concat", vec), Expression::Unique(expression) => self.unary_function("unique", expression), Expression::Required(expression) => self.unary_function("required", expression), - Expression::Join { parent, children } => self.join(&parent, &children), - Expression::MapField { field, records } => self.map_field(&field, &records), + Expression::Join { parent, children } => self.join(parent, children), + Expression::MapField { field, records } => self.map_field(field, records), } } @@ -116,7 +116,7 @@ where }) .parens(), ), - PrismaValue::List(values) => self.list(&values), + PrismaValue::List(values) => self.list(values), _ => self .keyword("const") .append(self.text(format!("{value:?}")).annotate(color_lit()).parens()), diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index f3ff82c95298..a54c0fe1cea5 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -6,14 +6,12 @@ use quaint::{ prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, visitor::Visitor, }; +use query_builder::DbQuery; use read::translate_read_query; -use sql_query_connector::context::Context; +use sql_query_builder::Context; use write::translate_write_query; -use crate::{ - 
compiler::expression::{DbQuery, Expression}, - Query, -}; +use crate::{compiler::expression::Expression, Query}; use super::TranslateResult; diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 785d6d13c0ff..ab540d024722 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -1,13 +1,5 @@ use std::collections::HashSet; -use itertools::Itertools; -use query_structure::{ - ConditionValue, Filter, ModelProjection, PrismaValue, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, -}; -use sql_query_connector::{ - context::Context, model_extensions::AsColumns, query_arguments_ext::QueryArgumentsExt, query_builder, -}; - use crate::{ compiler::{ expression::{Binding, Expression, JoinExpression}, @@ -15,6 +7,11 @@ use crate::{ }, FilteredQuery, ReadQuery, RelatedRecordsQuery, }; +use itertools::Itertools; +use query_structure::{ + ConditionValue, Filter, ModelProjection, PrismaValue, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, +}; +use sql_query_builder::{read, AsColumns, Context, QueryArgumentsExt}; use super::build_db_query; @@ -23,7 +20,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ReadQuery::RecordQuery(rq) => { let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); - let query = query_builder::read::get_records( + let query = read::get_records( &rq.model, ModelProjection::from(&selected_fields) .as_columns(ctx) @@ -49,7 +46,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans let needs_reversed_order = mrq.args.needs_reversed_order(); // TODO: we ignore chunking for now - let query = query_builder::read::get_records( + let query = read::get_records( &mrq.model, ModelProjection::from(&selected_fields) .as_columns(ctx) @@ -177,7 +174,7 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: 
&Context<'_>) -> Transl let to_one_relation = !rrq.parent_field.arity().is_list(); // TODO: we ignore chunking for now - let query = query_builder::read::get_records( + let query = read::get_records( &rrq.parent_field.related_model(), ModelProjection::from(&selected_fields) .as_columns(ctx) diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-engine/core/src/compiler/translate/query/write.rs index b3ae28a7b76e..2dbbf12327c7 100644 --- a/query-engine/core/src/compiler/translate/query/write.rs +++ b/query-engine/core/src/compiler/translate/query/write.rs @@ -1,5 +1,6 @@ use query_structure::ModelProjection; -use sql_query_connector::{context::Context, generate_insert_statements, query_builder}; +use sql_query_builder::{write, Context}; +use sql_query_connector::generate_insert_statements; use crate::{ compiler::{expression::Expression, translate::TranslateResult}, @@ -13,12 +14,7 @@ pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> Tra WriteQuery::CreateRecord(cr) => { // TODO: MySQL needs additional logic to generate IDs on our side. 
// See sql_query_connector::database::operations::write::create_record - let query = query_builder::write::create_record( - &cr.model, - cr.args, - &ModelProjection::from(&cr.selected_fields), - ctx, - ); + let query = write::create_record(&cr.model, cr.args, &ModelProjection::from(&cr.selected_fields), ctx); // TODO: we probably need some additional node type or extra info in the WriteQuery node // to help the client executor figure out the returned ID in the case when it's inferred diff --git a/query-engine/core/src/interpreter/query_interpreters/write.rs b/query-engine/core/src/interpreter/query_interpreters/write.rs index 3dcb992b4356..f6267bf0488c 100644 --- a/query-engine/core/src/interpreter/query_interpreters/write.rs +++ b/query-engine/core/src/interpreter/query_interpreters/write.rs @@ -5,8 +5,8 @@ use crate::{ query_ast::*, QueryResult, RecordSelection, }; -use connector::{ConnectionLike, DatasourceFieldName, NativeUpsert, WriteArgs}; -use query_structure::{ManyRecords, Model, RawJson}; +use connector::{ConnectionLike, NativeUpsert}; +use query_structure::{DatasourceFieldName, ManyRecords, Model, RawJson, WriteArgs}; use telemetry::TraceParent; pub(crate) async fn execute( diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index 2326183b7b54..09a5af04dc79 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -1,9 +1,8 @@ //! 
Prisma read query AST use super::FilteredQuery; use crate::ToGraphviz; -use connector::AggregationSelection; use enumflags2::BitFlags; -use query_structure::{prelude::*, Filter, QueryArguments, RelationLoadStrategy}; +use query_structure::{prelude::*, AggregationSelection, Filter, QueryArguments, RelationLoadStrategy}; use std::fmt::Display; #[allow(clippy::enum_variant_names)] diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index ca0287179e32..b538e2675e32 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -1,8 +1,8 @@ //! Write query AST use super::{FilteredNestedMutation, FilteredQuery}; use crate::{ReadQuery, RecordQuery, ToGraphviz}; -use connector::{DatasourceFieldName, NativeUpsert, RecordFilter, WriteArgs}; -use query_structure::{prelude::*, Filter}; +use connector::NativeUpsert; +use query_structure::{prelude::*, DatasourceFieldName, Filter, RecordFilter, WriteArgs}; use std::collections::HashMap; #[derive(Debug, Clone)] diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs index 94e8b1bcbdc9..685d9c4e1e23 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs @@ -6,9 +6,8 @@ pub(crate) use group_by::*; use super::*; use crate::FieldPair; -use connector::AggregationSelection; use itertools::Itertools; -use query_structure::{Model, ScalarFieldRef}; +use query_structure::{AggregationSelection, Model, ScalarFieldRef}; use schema::constants::aggregations::*; /// Resolves the given field as a aggregation query. 
diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 86360291ead5..9dc0c0821465 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -4,9 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ArgumentListLookup, ParsedField, ParsedInputList, ParsedInputMap, }; -use connector::WriteArgs; use psl::{datamodel_connector::ConnectorCapability, parser_database::RelationFieldId}; -use query_structure::{IntoFilter, Model, Zipper}; +use query_structure::{IntoFilter, Model, WriteArgs, Zipper}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; use write_args_parser::*; diff --git a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs index 5044451193f3..c3de8071d804 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, }; -use connector::RecordFilter; -use query_structure::{Filter, Model, PrismaValue, RelationFieldRef}; +use query_structure::{Filter, Model, PrismaValue, RecordFilter, RelationFieldRef}; use std::convert::TryInto; /// Adds a delete (single) record node to the graph and connects it to the parent. 
diff --git a/query-engine/core/src/query_graph_builder/write/utils.rs b/query-engine/core/src/query_graph_builder/write/utils.rs index b5db88b2240d..a882c5a2d312 100644 --- a/query-engine/core/src/query_graph_builder/write/utils.rs +++ b/query-engine/core/src/query_graph_builder/write/utils.rs @@ -3,10 +3,12 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, Computation, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{DatasourceFieldName, RecordFilter, WriteArgs, WriteOperation}; use indexmap::IndexMap; use psl::parser_database::ReferentialAction; -use query_structure::{FieldSelection, Filter, Model, PrismaValue, RelationFieldRef, SelectionResult}; +use query_structure::{ + DatasourceFieldName, FieldSelection, Filter, Model, PrismaValue, RecordFilter, RelationFieldRef, SelectionResult, + WriteArgs, WriteOperation, +}; use schema::QuerySchema; /// Coerces single values (`ParsedInputValue::Single` and `ParsedInputValue::Map`) into a vector. 
diff --git a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs index 5e5cc464fa51..0b79f1a0e7ca 100644 --- a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs +++ b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs @@ -1,7 +1,9 @@ use super::*; use crate::query_document::{ParsedInputMap, ParsedInputValue}; -use connector::{DatasourceFieldName, WriteArgs, WriteOperation}; -use query_structure::{CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, TypeIdentifier}; +use query_structure::{ + CompositeFieldRef, DatasourceFieldName, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, + TypeIdentifier, WriteArgs, WriteOperation, +}; use schema::constants::{args, json_null, operations}; use std::{borrow::Cow, convert::TryInto}; diff --git a/query-engine/query-builders/query-builder/Cargo.toml b/query-engine/query-builders/query-builder/Cargo.toml new file mode 100644 index 000000000000..4c35b489e828 --- /dev/null +++ b/query-engine/query-builders/query-builder/Cargo.toml @@ -0,0 +1,9 @@ +[package] +edition = "2021" +name = "query-builder" +version = "0.1.0" + +[dependencies] +serde.workspace = true + +query-structure = { path = "../../query-structure" } diff --git a/query-engine/query-builders/query-builder/src/lib.rs b/query-engine/query-builders/query-builder/src/lib.rs new file mode 100644 index 000000000000..240ca848580c --- /dev/null +++ b/query-engine/query-builders/query-builder/src/lib.rs @@ -0,0 +1,14 @@ +use query_structure::PrismaValue; +use serde::Serialize; + +#[derive(Debug, Serialize)] +pub struct DbQuery { + pub query: String, + pub params: Vec, +} + +impl DbQuery { + pub fn new(query: String, params: Vec) -> Self { + Self { query, params } + } +} diff --git a/query-engine/query-builders/sql-query-builder/Cargo.toml b/query-engine/query-builders/sql-query-builder/Cargo.toml new file mode 100644 
index 000000000000..80cccff5f961 --- /dev/null +++ b/query-engine/query-builders/sql-query-builder/Cargo.toml @@ -0,0 +1,19 @@ +[package] +edition = "2021" +name = "sql-query-builder" +version = "0.1.0" + +[dependencies] +quaint = { path = "../../../quaint" } +query-structure = { path = "../../query-structure" } +query-builder = { path = "../query-builder" } +telemetry = { path = "../../../libs/telemetry" } +prisma-value = { path = "../../../libs/prisma-value" } +psl = { path = "../../../psl/psl" } + +itertools.workspace = true +chrono.workspace = true +serde_json.workspace = true + +[features] +relation_joins = [] diff --git a/query-engine/connectors/sql-query-connector/src/column_metadata.rs b/query-engine/query-builders/sql-query-builder/src/column_metadata.rs similarity index 84% rename from query-engine/connectors/sql-query-connector/src/column_metadata.rs rename to query-engine/query-builders/sql-query-builder/src/column_metadata.rs index c64871b7eb22..0e3ab88df37e 100644 --- a/query-engine/connectors/sql-query-connector/src/column_metadata.rs +++ b/query-engine/query-builders/sql-query-builder/src/column_metadata.rs @@ -2,7 +2,7 @@ use query_structure::{FieldArity, TypeIdentifier}; /// Helps dealing with column value conversion and possible error resolution. #[derive(Clone, Debug, Copy)] -pub(crate) struct ColumnMetadata<'a> { +pub struct ColumnMetadata<'a> { identifier: &'a TypeIdentifier, name: Option<&'a str>, arity: FieldArity, @@ -41,7 +41,7 @@ impl<'a> ColumnMetadata<'a> { /// Create a set of metadata objects, combining column names and type /// information. -pub(crate) fn create<'a, T>(field_names: &'a [T], idents: &'a [(TypeIdentifier, FieldArity)]) -> Vec> +pub fn create<'a, T>(field_names: &'a [T], idents: &'a [(TypeIdentifier, FieldArity)]) -> Vec> where T: AsRef, { @@ -55,7 +55,7 @@ where } /// Create a set of metadata objects. 
-pub(crate) fn create_anonymous(idents: &[(TypeIdentifier, FieldArity)]) -> Vec> { +pub fn create_anonymous(idents: &[(TypeIdentifier, FieldArity)]) -> Vec> { idents .iter() .map(|(identifier, arity)| ColumnMetadata::new(identifier, *arity)) diff --git a/query-engine/connectors/sql-query-connector/src/context.rs b/query-engine/query-builders/sql-query-builder/src/context.rs similarity index 78% rename from query-engine/connectors/sql-query-connector/src/context.rs rename to query-engine/query-builders/sql-query-builder/src/context.rs index b3e28c8152c1..6bb1f2a1414a 100644 --- a/query-engine/connectors/sql-query-connector/src/context.rs +++ b/query-engine/query-builders/sql-query-builder/src/context.rs @@ -25,7 +25,19 @@ impl<'a> Context<'a> { } } + pub fn traceparent(&self) -> Option { + self.traceparent + } + pub(crate) fn schema_name(&self) -> &str { self.connection_info.schema_name() } + + pub fn max_insert_rows(&self) -> Option { + self.max_insert_rows + } + + pub fn max_bind_values(&self) -> Option { + self.max_bind_values + } } diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/query-builders/sql-query-builder/src/cursor_condition.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/cursor_condition.rs rename to query-engine/query-builders/sql-query-builder/src/cursor_condition.rs diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/query-builders/sql-query-builder/src/filter/alias.rs similarity index 95% rename from query-engine/connectors/sql-query-connector/src/filter/alias.rs rename to query-engine/query-builders/sql-query-builder/src/filter/alias.rs index 10fc31080aae..5487cb108ab0 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/alias.rs +++ b/query-engine/query-builders/sql-query-builder/src/filter/alias.rs @@ -43,7 +43,7 @@ impl Alias { /// A string representation of the current alias. 
The current mode can be /// overridden by defining the `mode_override`. - pub fn to_string(&self, mode_override: Option) -> String { + pub fn to_string(self, mode_override: Option) -> String { match mode_override.unwrap_or(self.mode) { AliasMode::Table => format!("t{}", self.counter), AliasMode::Join => format!("j{}", self.counter), @@ -51,7 +51,7 @@ impl Alias { } #[cfg(feature = "relation_joins")] - pub fn to_table_string(&self) -> String { + pub fn to_table_string(self) -> String { self.to_string(Some(AliasMode::Table)) } } diff --git a/query-engine/connectors/sql-query-connector/src/filter/mod.rs b/query-engine/query-builders/sql-query-builder/src/filter/mod.rs similarity index 79% rename from query-engine/connectors/sql-query-connector/src/filter/mod.rs rename to query-engine/query-builders/sql-query-builder/src/filter/mod.rs index 573024845b45..7cbe091a816a 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/mod.rs +++ b/query-engine/query-builders/sql-query-builder/src/filter/mod.rs @@ -7,16 +7,16 @@ pub use visitor::*; use crate::{context::Context, join_utils::AliasedJoin}; -pub(crate) struct FilterBuilder {} +pub struct FilterBuilder {} pub(crate) struct FilterBuilderWithJoins {} -pub(crate) struct FilterBuilderWithoutJoins {} +pub struct FilterBuilderWithoutJoins {} impl FilterBuilder { pub(crate) fn with_top_level_joins() -> FilterBuilderWithJoins { FilterBuilderWithJoins {} } - pub(crate) fn without_top_level_joins() -> FilterBuilderWithoutJoins { + pub fn without_top_level_joins() -> FilterBuilderWithoutJoins { FilterBuilderWithoutJoins {} } } @@ -34,7 +34,7 @@ impl FilterBuilderWithJoins { impl FilterBuilderWithoutJoins { /// Visits a filter without any top-level joins. Can be safely used in any context. 
- pub(crate) fn visit_filter(&self, filter: Filter, ctx: &Context) -> ConditionTree<'static> { + pub fn visit_filter(&self, filter: Filter, ctx: &Context) -> ConditionTree<'static> { let (cond, _) = FilterVisitor::without_top_level_joins().visit_filter(filter, ctx); cond diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/query-builders/sql-query-builder/src/filter/visitor.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/filter/visitor.rs rename to query-engine/query-builders/sql-query-builder/src/filter/visitor.rs diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/query-builders/sql-query-builder/src/join_utils.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/join_utils.rs rename to query-engine/query-builders/sql-query-builder/src/join_utils.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/query-builders/sql-query-builder/src/lib.rs similarity index 65% rename from query-engine/connectors/sql-query-connector/src/query_builder/mod.rs rename to query-engine/query-builders/sql-query-builder/src/lib.rs index 15d696b4e7ea..dafabf1f3772 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ b/query-engine/query-builders/sql-query-builder/src/lib.rs @@ -1,16 +1,32 @@ +pub mod column_metadata; +mod context; +mod cursor_condition; +mod filter; +mod join_utils; +pub mod limit; +mod model_extensions; +mod nested_aggregations; +mod ordering; +mod query_arguments_ext; pub mod read; #[cfg(feature = "relation_joins")] pub mod select; +mod sql_trace; pub mod write; -use crate::context::Context; -use crate::model_extensions::SelectionResultExt; use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values}; use query_structure::SelectionResult; +pub use column_metadata::ColumnMetadata; +pub use context::Context; +pub use 
filter::FilterBuilder; +pub use model_extensions::{AsColumn, AsColumns, AsTable, RelationFieldExt, SelectionResultExt}; +pub use query_arguments_ext::QueryArgumentsExt; +pub use sql_trace::SqlTraceComment; + const PARAMETER_LIMIT: usize = 2000; -pub(super) fn chunked_conditions( +pub fn chunked_conditions( columns: &[Column<'static>], records: &[&SelectionResult], ctx: &Context<'_>, @@ -29,7 +45,7 @@ where .collect() } -pub(super) fn in_conditions<'a>( +pub fn in_conditions<'a>( columns: &'a [Column<'static>], results: impl IntoIterator, ctx: &Context<'_>, diff --git a/query-engine/query-builders/sql-query-builder/src/limit.rs b/query-engine/query-builders/sql-query-builder/src/limit.rs new file mode 100644 index 000000000000..1e392fea529c --- /dev/null +++ b/query-engine/query-builders/sql-query-builder/src/limit.rs @@ -0,0 +1,31 @@ +use crate::{model_extensions::*, Context}; +use quaint::ast::*; +use query_structure::*; + +pub fn wrap_with_limit_subquery_if_needed<'a>( + model: &Model, + filter_condition: ConditionTree<'a>, + limit: Option, + ctx: &Context, +) -> ConditionTree<'a> { + if let Some(limit) = limit { + let columns = model + .primary_identifier() + .as_scalar_fields() + .expect("primary identifier must contain scalar fields") + .into_iter() + .map(|f| f.as_column(ctx)) + .collect::>(); + + ConditionTree::from( + Row::from(columns.clone()).in_selection( + Select::from_table(model.as_table(ctx)) + .columns(columns) + .so_that(filter_condition) + .limit(limit), + ), + ) + } else { + filter_condition + } +} diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/column.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/model_extensions/column.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/column.rs diff --git a/query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs 
b/query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs new file mode 100644 index 000000000000..4a92c88d4b85 --- /dev/null +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/mod.rs @@ -0,0 +1,8 @@ +mod column; +mod relation; +mod scalar_field; +mod selection_result; +mod table; + +pub use self::{column::*, relation::*, selection_result::*, table::*}; +pub(crate) use scalar_field::*; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/relation.rs similarity index 99% rename from query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/relation.rs index 981390536807..49518c90f4c5 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/relation.rs @@ -5,7 +5,7 @@ use crate::{ use quaint::{ast::Table, prelude::Column}; use query_structure::{walkers, ModelProjection, Relation, RelationField}; -pub(crate) trait RelationFieldExt { +pub trait RelationFieldExt { fn m2m_columns(&self, ctx: &Context<'_>) -> Vec>; fn join_columns(&self, ctx: &Context<'_>) -> ColumnIterator; fn identifier_columns(&self, ctx: &Context<'_>) -> ColumnIterator; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/scalar_field.rs similarity index 78% rename from query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/scalar_field.rs index a3e88aa1d403..b94dc6be698f 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/scalar_field.rs 
@@ -1,6 +1,6 @@ use crate::context::Context; use chrono::Utc; -use prisma_value::{PlaceholderType, PrismaValue}; +use prisma_value::PrismaValue; use quaint::{ ast::{EnumName, Value, ValueType, VarType}, prelude::{EnumVariant, TypeDataLength, TypeFamily}, @@ -123,43 +123,6 @@ impl ScalarFieldExt for ScalarField { } } -/// Attempts to convert a PrismaValue to a database value without any additional type information. -/// Can't reliably map Null values. -pub fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { - match pv { - PrismaValue::String(s) => s.into(), - PrismaValue::Float(f) => f.into(), - PrismaValue::Boolean(b) => b.into(), - PrismaValue::DateTime(d) => d.with_timezone(&Utc).into(), - PrismaValue::Enum(e) => e.into(), - PrismaValue::Int(i) => i.into(), - PrismaValue::BigInt(i) => i.into(), - PrismaValue::Uuid(u) => u.to_string().into(), - PrismaValue::List(l) => Value::array(l.into_iter().map(convert_lossy)), - PrismaValue::Json(s) => Value::json(serde_json::from_str(&s).unwrap()), - PrismaValue::Bytes(b) => Value::bytes(b), - PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. 
- PrismaValue::Object(_) => unimplemented!(), - PrismaValue::Placeholder { name, r#type } => Value::var(name, convert_placeholder_type_to_var_type(&r#type)), - } -} - -fn convert_placeholder_type_to_var_type(pt: &PlaceholderType) -> VarType { - match pt { - PlaceholderType::Any => VarType::Unknown, - PlaceholderType::String => VarType::Text, - PlaceholderType::Int => VarType::Int32, - PlaceholderType::BigInt => VarType::Int64, - PlaceholderType::Float => VarType::Numeric, - PlaceholderType::Boolean => VarType::Boolean, - PlaceholderType::Decimal => VarType::Numeric, - PlaceholderType::Date => VarType::DateTime, - PlaceholderType::Array(t) => VarType::Array(Box::new(convert_placeholder_type_to_var_type(t))), - PlaceholderType::Object => VarType::Json, - PlaceholderType::Bytes => VarType::Bytes, - } -} - fn parse_scalar_length(sf: &ScalarField) -> Option { sf.native_type() .and_then(|nt| nt.args().into_iter().next()) diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs b/query-engine/query-builders/sql-query-builder/src/model_extensions/selection_result.rs similarity index 97% rename from query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/selection_result.rs index 21d6aac3dbe2..4031f7f169a3 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/selection_result.rs @@ -3,7 +3,7 @@ use crate::context::Context; use quaint::Value; use query_structure::{PrismaValue, SelectedField, SelectionResult}; -pub(crate) trait SelectionResultExt { +pub trait SelectionResultExt { fn misses_autogen_value(&self) -> bool; fn db_values<'a>(&self, ctx: &Context<'_>) -> Vec>; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs 
b/query-engine/query-builders/sql-query-builder/src/model_extensions/table.rs similarity index 98% rename from query-engine/connectors/sql-query-connector/src/model_extensions/table.rs rename to query-engine/query-builders/sql-query-builder/src/model_extensions/table.rs index ead15c34658e..5d8275e38b84 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs +++ b/query-engine/query-builders/sql-query-builder/src/model_extensions/table.rs @@ -12,7 +12,7 @@ pub(crate) fn db_name_with_schema(model: &Model, ctx: &Context<'_>) -> Table<'st (schema_prefix, model_db_name).into() } -pub(crate) trait AsTable { +pub trait AsTable { fn as_table(&self, ctx: &Context<'_>) -> Table<'static>; } diff --git a/query-engine/connectors/sql-query-connector/src/nested_aggregations.rs b/query-engine/query-builders/sql-query-builder/src/nested_aggregations.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/nested_aggregations.rs rename to query-engine/query-builders/sql-query-builder/src/nested_aggregations.rs diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/query-builders/sql-query-builder/src/ordering.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/ordering.rs rename to query-engine/query-builders/sql-query-builder/src/ordering.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs b/query-engine/query-builders/sql-query-builder/src/query_arguments_ext.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs rename to query-engine/query-builders/sql-query-builder/src/query_arguments_ext.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/query-builders/sql-query-builder/src/read.rs similarity index 97% rename from query-engine/connectors/sql-query-connector/src/query_builder/read.rs rename to 
query-engine/query-builders/sql-query-builder/src/read.rs index e33d51857a2f..f359b94c25d7 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/query-builders/sql-query-builder/src/read.rs @@ -1,12 +1,17 @@ -use crate::{ - cursor_condition, filter::FilterBuilder, model_extensions::*, nested_aggregations, ordering::OrderByBuilder, - sql_trace::SqlTraceComment, Context, -}; -use connector_interface::AggregationSelection; use itertools::Itertools; use quaint::ast::*; use query_structure::*; +use crate::{ + context::Context, + cursor_condition, + filter::FilterBuilder, + model_extensions::{AsColumn, AsColumns, AsTable}, + nested_aggregations, + ordering::OrderByBuilder, + sql_trace::SqlTraceComment, +}; + pub trait SelectDefinition { fn into_select<'a>( self, @@ -168,7 +173,7 @@ where /// ``` /// Important note: Do not use the AsColumn trait here as we need to construct column references that are relative, /// not absolute - e.g. `SELECT "field" FROM (...)` NOT `SELECT "full"."path"."to"."field" FROM (...)`. 
-pub(crate) fn aggregate( +pub fn aggregate( model: &Model, selections: &[AggregationSelection], args: QueryArguments, @@ -222,7 +227,7 @@ pub(crate) fn aggregate( ) } -pub(crate) fn group_by_aggregate( +pub fn group_by_aggregate( model: &Model, args: QueryArguments, selections: &[AggregationSelection], diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs b/query-engine/query-builders/sql-query-builder/src/select/lateral.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs rename to query-engine/query-builders/sql-query-builder/src/select/lateral.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs b/query-engine/query-builders/sql-query-builder/src/select/mod.rs similarity index 99% rename from query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs rename to query-engine/query-builders/sql-query-builder/src/select/mod.rs index f0b4fd7abe2a..a2682d670557 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs +++ b/query-engine/query-builders/sql-query-builder/src/select/mod.rs @@ -23,7 +23,7 @@ use self::{lateral::LateralJoinSelectBuilder, subquery::SubqueriesSelectBuilder} pub(crate) const JSON_AGG_IDENT: &str = "__prisma_data__"; -pub(crate) struct SelectBuilder; +pub struct SelectBuilder; impl SelectBuilder { pub fn build(args: QueryArguments, selected_fields: &FieldSelection, ctx: &Context<'_>) -> Select<'static> { diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs b/query-engine/query-builders/sql-query-builder/src/select/subquery.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs rename to query-engine/query-builders/sql-query-builder/src/select/subquery.rs diff --git a/query-engine/connectors/sql-query-connector/src/sql_trace.rs 
b/query-engine/query-builders/sql-query-builder/src/sql_trace.rs similarity index 100% rename from query-engine/connectors/sql-query-connector/src/sql_trace.rs rename to query-engine/query-builders/sql-query-builder/src/sql_trace.rs diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/query-builders/sql-query-builder/src/write.rs similarity index 94% rename from query-engine/connectors/sql-query-connector/src/query_builder/write.rs rename to query-engine/query-builders/sql-query-builder/src/write.rs index 5cfbd8002fe0..1059cb6069f8 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/query-builders/sql-query-builder/src/write.rs @@ -1,6 +1,5 @@ use crate::limit::wrap_with_limit_subquery_if_needed; use crate::{model_extensions::*, sql_trace::SqlTraceComment, Context}; -use connector_interface::{DatasourceFieldName, ScalarWriteOperation, WriteArgs}; use quaint::ast::*; use query_structure::*; use std::{collections::HashSet, convert::TryInto}; @@ -40,7 +39,7 @@ pub fn create_record( /// where each `WriteArg` in the Vec is one row. /// Requires `affected_fields` to be non-empty to produce valid SQL. #[allow(clippy::mutable_key_type)] -pub(crate) fn create_records_nonempty( +pub fn create_records_nonempty( model: &Model, args: Vec, skip_duplicates: bool, @@ -97,7 +96,7 @@ pub(crate) fn create_records_nonempty( } /// `INSERT` empty records statement. 
-pub(crate) fn create_records_empty( +pub fn create_records_empty( model: &Model, skip_duplicates: bool, selected_fields: Option<&ModelProjection>, @@ -117,7 +116,7 @@ pub(crate) fn create_records_empty( insert } -pub(crate) fn build_update_and_set_query( +pub fn build_update_and_set_query( model: &Model, args: WriteArgs, selected_fields: Option<&ModelProjection>, @@ -185,22 +184,20 @@ pub(crate) fn build_update_and_set_query( query } -pub(crate) fn chunk_update_with_ids( +pub fn chunk_update_with_ids( update: Update<'static>, model: &Model, ids: &[&SelectionResult], filter_condition: ConditionTree<'static>, ctx: &Context<'_>, -) -> crate::Result>> { +) -> Vec> { let columns: Vec<_> = ModelProjection::from(model.primary_identifier()) .as_columns(ctx) .collect(); - let query = super::chunked_conditions(&columns, ids, ctx, |conditions| { + super::chunked_conditions(&columns, ids, ctx, |conditions| { update.clone().so_that(conditions.and(filter_condition.clone())) - }); - - Ok(query) + }) } /// Converts a list of selected fields into an iterator of table columns. 
@@ -211,7 +208,7 @@ fn projection_into_columns( selected_fields.as_columns(ctx).map(|c| c.set_is_selected(true)) } -pub(crate) fn delete_returning( +pub fn delete_returning( model: &Model, filter: ConditionTree<'static>, selected_fields: &ModelProjection, @@ -224,7 +221,7 @@ pub(crate) fn delete_returning( .into() } -pub(crate) fn delete_many_from_filter( +pub fn delete_many_from_filter( model: &Model, filter_condition: ConditionTree<'static>, limit: Option, @@ -238,7 +235,7 @@ pub(crate) fn delete_many_from_filter( .into() } -pub(crate) fn delete_many_from_ids_and_filter( +pub fn delete_many_from_ids_and_filter( model: &Model, ids: &[&SelectionResult], filter_condition: ConditionTree<'static>, @@ -254,7 +251,7 @@ pub(crate) fn delete_many_from_ids_and_filter( }) } -pub(crate) fn create_relation_table_records( +pub fn create_relation_table_records( field: &RelationFieldRef, parent_id: &SelectionResult, child_ids: &[SelectionResult], @@ -279,7 +276,7 @@ pub(crate) fn create_relation_table_records( insert.build().on_conflict(OnConflict::DoNothing).into() } -pub(crate) fn delete_relation_table_records( +pub fn delete_relation_table_records( parent_field: &RelationFieldRef, parent_id: &SelectionResult, child_ids: &[SelectionResult], diff --git a/query-engine/query-structure/Cargo.toml b/query-engine/query-structure/Cargo.toml index 183dd2847301..78c0e4469fc6 100644 --- a/query-engine/query-structure/Cargo.toml +++ b/query-engine/query-structure/Cargo.toml @@ -16,6 +16,7 @@ cuid = { workspace = true, optional = true } ulid = { workspace = true, optional = true } nanoid = { version = "0.4.0", optional = true } chrono.workspace = true +indexmap.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] getrandom = { workspace = true, features = ["js"] } diff --git a/query-engine/query-structure/src/aggregate_selection.rs b/query-engine/query-structure/src/aggregate_selection.rs new file mode 100644 index 000000000000..f83bf6e0ff95 --- /dev/null +++ 
b/query-engine/query-structure/src/aggregate_selection.rs @@ -0,0 +1,70 @@ +use psl::schema_ast::ast::FieldArity; + +use crate::{ScalarFieldRef, TypeIdentifier}; + +/// Selections for aggregation queries. +#[derive(Debug, Clone)] +pub enum AggregationSelection { + /// Single field selector. Only valid in the context of group by statements. + Field(ScalarFieldRef), + + /// Counts records of the model that match the query. + /// `all` indicates that an all-records selection has been made (e.g. SQL *). + /// `fields` are specific fields to count on. By convention, if `all` is true, + /// it will always be the last of the count results. + Count { all: bool, fields: Vec }, + + /// Compute average for each field contained. + Average(Vec), + + /// Compute sum for each field contained. + Sum(Vec), + + /// Compute minimum for each field contained. + Min(Vec), + + /// Compute maximum for each field contained. + Max(Vec), +} + +impl AggregationSelection { + /// Returns (field_db_name, TypeIdentifier, FieldArity) + pub fn identifiers(&self) -> Vec<(String, TypeIdentifier, FieldArity)> { + match self { + AggregationSelection::Field(field) => { + vec![(field.db_name().to_owned(), field.type_identifier(), field.arity())] + } + + AggregationSelection::Count { all, fields } => { + let mut mapped = Self::map_field_types(fields, Some(TypeIdentifier::Int)); + + if *all { + mapped.push(("all".to_owned(), TypeIdentifier::Int, FieldArity::Required)); + } + + mapped + } + + AggregationSelection::Average(fields) => Self::map_field_types(fields, Some(TypeIdentifier::Float)), + AggregationSelection::Sum(fields) => Self::map_field_types(fields, None), + AggregationSelection::Min(fields) => Self::map_field_types(fields, None), + AggregationSelection::Max(fields) => Self::map_field_types(fields, None), + } + } + + fn map_field_types( + fields: &[ScalarFieldRef], + fixed_type: Option, + ) -> Vec<(String, TypeIdentifier, FieldArity)> { + fields + .iter() + .map(|f| { + ( + f.db_name().to_owned(),
+ fixed_type.unwrap_or_else(|| f.type_identifier()), + FieldArity::Required, + ) + }) + .collect() + } +} diff --git a/query-engine/query-structure/src/filter/mod.rs b/query-engine/query-structure/src/filter/mod.rs index 82727f35c80e..64e837e62f3c 100644 --- a/query-engine/query-structure/src/filter/mod.rs +++ b/query-engine/query-structure/src/filter/mod.rs @@ -10,6 +10,7 @@ mod composite; mod into_filter; mod json; mod list; +mod record; mod relation; mod scalar; @@ -18,6 +19,7 @@ pub use composite::*; pub use into_filter::*; pub use json::*; pub use list::*; +pub use record::*; pub use relation::*; pub use scalar::*; diff --git a/query-engine/query-structure/src/filter/record.rs b/query-engine/query-structure/src/filter/record.rs new file mode 100644 index 000000000000..41ea87c47325 --- /dev/null +++ b/query-engine/query-structure/src/filter/record.rs @@ -0,0 +1,55 @@ +use crate::SelectionResult; + +use super::Filter; + +/// A wrapper struct allowing to either filter for records or for the core to +/// communicate already known record selectors to connectors. +/// +/// Connector implementations should use known selectors to skip unnecessary fetch operations +/// if the query core already determined the selectors in a previous step. Simply put, +/// `selectors` should always have precedence over `filter`.
+#[derive(Debug, Clone)] +pub struct RecordFilter { + pub filter: Filter, + pub selectors: Option>, +} + +impl RecordFilter { + pub fn empty() -> Self { + Self { + filter: Filter::empty(), + selectors: None, + } + } + + pub fn has_selectors(&self) -> bool { + self.selectors.is_some() + } +} + +impl From for RecordFilter { + fn from(filter: Filter) -> Self { + Self { + filter, + selectors: None, + } + } +} + +impl From> for RecordFilter { + fn from(selectors: Vec) -> Self { + Self { + filter: Filter::empty(), + selectors: Some(selectors), + } + } +} + +impl From for RecordFilter { + fn from(selector: SelectionResult) -> Self { + Self { + filter: Filter::empty(), + selectors: Some(vec![selector]), + } + } +} diff --git a/query-engine/query-structure/src/lib.rs b/query-engine/query-structure/src/lib.rs index abf47fe8447c..66c4af906dc2 100644 --- a/query-engine/query-structure/src/lib.rs +++ b/query-engine/query-structure/src/lib.rs @@ -1,3 +1,4 @@ +mod aggregate_selection; mod composite_type; mod convert; mod default_value; @@ -18,12 +19,14 @@ mod query_arguments; mod record; mod relation; mod selection_result; +mod write_args; mod zipper; pub mod filter; pub mod prelude; pub use self::{default_value::*, native_type_instance::*, zipper::*}; +pub use aggregate_selection::*; pub use composite_type::*; pub use convert::convert; pub use distinct::*; @@ -41,6 +44,7 @@ pub use query_arguments::*; pub use record::*; pub use relation::*; pub use selection_result::*; +pub use write_args::*; // Re-exports pub use prisma_value::*; diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/query-structure/src/write_args.rs similarity index 98% rename from query-engine/connectors/query-connector/src/write_args.rs rename to query-engine/query-structure/src/write_args.rs index b02fa873f83c..6429bed497c1 100644 --- a/query-engine/connectors/query-connector/src/write_args.rs +++ b/query-engine/query-structure/src/write_args.rs @@ -1,9 +1,8 @@ -use 
crate::error::{ConnectorError, ErrorKind}; -use indexmap::{map::Keys, IndexMap}; -use query_structure::{ +use crate::{ CompositeFieldRef, Field, Filter, Model, ModelProjection, PrismaValue, ScalarFieldRef, SelectedField, SelectionResult, }; +use indexmap::{map::Keys, IndexMap}; use std::{borrow::Borrow, convert::TryInto, ops::Deref}; /// WriteArgs represent data to be written to an underlying data source. @@ -334,19 +333,20 @@ impl From<(&SelectedField, PrismaValue)> for WriteOperation { } impl TryInto for WriteOperation { - type Error = ConnectorError; + type Error = UnexpectedWriteOperation; fn try_into(self) -> Result { match self { WriteOperation::Scalar(ScalarWriteOperation::Set(pv)) => Ok(pv), WriteOperation::Composite(CompositeWriteOperation::Set(pv)) => Ok(pv), - x => Err(ConnectorError::from_kind(ErrorKind::InternalConversionError(format!( - "Unable to convert write expression {x:?} into prisma value." - )))), + x => Err(UnexpectedWriteOperation(x)), } } } +#[derive(Debug)] +pub struct UnexpectedWriteOperation(pub WriteOperation); + impl WriteArgs { pub fn new(args: IndexMap, request_now: PrismaValue) -> Self { Self { args, request_now } From 6bb32562c7ccfdbf97551c5d472ac510e8843530 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 15 Jan 2025 10:39:13 +0100 Subject: [PATCH 51/65] Remove napi copypasta from wasm engine --- query-engine/query-engine-wasm/src/wasm/engine.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/query-engine/query-engine-wasm/src/wasm/engine.rs b/query-engine/query-engine-wasm/src/wasm/engine.rs index 2f76eb2375e6..a1c7cc7846e9 100644 --- a/query-engine/query-engine-wasm/src/wasm/engine.rs +++ b/query-engine/query-engine-wasm/src/wasm/engine.rs @@ -373,9 +373,7 @@ impl QueryEngine { let engine = inner.as_engine()?; let request = RequestBody::try_from_str(&request, engine.engine_protocol())?; - let query_doc = request - .into_doc(engine.query_schema()) - .map_err(|err| 
napi::Error::from_reason(err.to_string()))?; + let query_doc = request.into_doc(engine.query_schema())?; let plan = query_core::compiler::compile(engine.query_schema(), query_doc).map_err(ApiError::from)?; Ok(serde_json::to_string(&plan)?) From 9cd83016c5a930c2582a03bcda45b948b76c20ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Florian=20G=C3=B6=C3=9Fler?= Date: Wed, 15 Jan 2025 20:55:31 +0100 Subject: [PATCH 52/65] feat(compiler): pass through connection_info (#5125) --- query-engine/core/src/compiler/mod.rs | 9 +++++-- query-engine/core/src/compiler/translate.rs | 27 ++++++++++++------- .../core/src/compiler/translate/query.rs | 24 +++++++++-------- .../core/src/compiler/translate/query/read.rs | 6 ++--- .../src/compiler/translate/query/write.rs | 6 ++--- .../sql-query-builder/src/context.rs | 2 +- .../query-engine-node-api/src/engine.rs | 19 +++++++++++-- .../query-engine-wasm/src/wasm/engine.rs | 7 +++-- .../query-engine/examples/compiler.rs | 9 ++++++- 9 files changed, 74 insertions(+), 35 deletions(-) diff --git a/query-engine/core/src/compiler/mod.rs b/query-engine/core/src/compiler/mod.rs index 26170861f259..878daa2f0e35 100644 --- a/query-engine/core/src/compiler/mod.rs +++ b/query-engine/core/src/compiler/mod.rs @@ -4,6 +4,7 @@ pub mod translate; use std::sync::Arc; pub use expression::Expression; +use quaint::connector::ConnectionInfo; use schema::QuerySchema; use thiserror::Error; pub use translate::{translate, TranslateError}; @@ -19,11 +20,15 @@ pub enum CompileError { TranslateError(#[from] TranslateError), } -pub fn compile(query_schema: &Arc, query_doc: QueryDocument) -> crate::Result { +pub fn compile( + query_schema: &Arc, + query_doc: QueryDocument, + connection_info: &ConnectionInfo, +) -> crate::Result { let QueryDocument::Single(query) = query_doc else { return Err(CompileError::UnsupportedRequest.into()); }; let (graph, _serializer) = QueryGraphBuilder::new(query_schema).build(query)?; - Ok(translate(graph).map_err(CompileError::from)?) 
+ Ok(translate(graph, connection_info).map_err(CompileError::from)?) } diff --git a/query-engine/core/src/compiler/translate.rs b/query-engine/core/src/compiler/translate.rs index 650d03e936fb..81742b50c304 100644 --- a/query-engine/core/src/compiler/translate.rs +++ b/query-engine/core/src/compiler/translate.rs @@ -1,10 +1,10 @@ mod query; +use crate::{EdgeRef, Node, NodeRef, Query, QueryGraph}; +use quaint::connector::ConnectionInfo; use query::translate_query; use thiserror::Error; -use crate::{EdgeRef, Node, NodeRef, Query, QueryGraph}; - use super::expression::{Binding, Expression}; #[derive(Debug, Error)] @@ -18,28 +18,35 @@ pub enum TranslateError { pub type TranslateResult = Result; -pub fn translate(mut graph: QueryGraph) -> TranslateResult { +pub fn translate(mut graph: QueryGraph, connection_info: &ConnectionInfo) -> TranslateResult { graph .root_nodes() .into_iter() - .map(|node| NodeTranslator::new(&mut graph, node, &[]).translate()) + .map(|node| NodeTranslator::new(&mut graph, node, &[], connection_info).translate()) .collect::>>() .map(Expression::Seq) } -struct NodeTranslator<'a, 'b> { +struct NodeTranslator<'a, 'b, 'c> { graph: &'a mut QueryGraph, node: NodeRef, #[allow(dead_code)] parent_edges: &'b [EdgeRef], + connection_info: &'c ConnectionInfo, } -impl<'a, 'b> NodeTranslator<'a, 'b> { - fn new(graph: &'a mut QueryGraph, node: NodeRef, parent_edges: &'b [EdgeRef]) -> Self { +impl<'a, 'b, 'c> NodeTranslator<'a, 'b, 'c> { + fn new( + graph: &'a mut QueryGraph, + node: NodeRef, + parent_edges: &'b [EdgeRef], + connection_info: &'c ConnectionInfo, + ) -> Self { Self { graph, node, parent_edges, + connection_info, } } @@ -64,7 +71,7 @@ impl<'a, 'b> NodeTranslator<'a, 'b> { .try_into() .expect("current node must be query"); - translate_query(query) + translate_query(query, self.connection_info) } #[allow(dead_code)] @@ -99,7 +106,7 @@ impl<'a, 'b> NodeTranslator<'a, 'b> { .into_iter() .map(|(_, node)| { let edges = self.graph.incoming_edges(&node); - 
NodeTranslator::new(self.graph, node, &edges).translate() + NodeTranslator::new(self.graph, node, &edges, self.connection_info).translate() }) .collect::, _>>()?; @@ -121,7 +128,7 @@ impl<'a, 'b> NodeTranslator<'a, 'b> { .map(|(_, node)| { let name = node.id(); let edges = self.graph.incoming_edges(&node); - let expr = NodeTranslator::new(self.graph, node, &edges).translate()?; + let expr = NodeTranslator::new(self.graph, node, &edges, self.connection_info).translate()?; Ok(Binding { name, expr }) }) .collect::>>()?; diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index a54c0fe1cea5..23c9e7321972 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -3,7 +3,7 @@ mod read; mod write; use quaint::{ - prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, + prelude::{ConnectionInfo, SqlFamily}, visitor::Visitor, }; use query_builder::DbQuery; @@ -15,14 +15,8 @@ use crate::{compiler::expression::Expression, Query}; use super::TranslateResult; -pub(crate) fn translate_query(query: Query) -> TranslateResult { - let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( - SqlFamily::Postgres, - "public".to_owned(), - None, - )); - - let ctx = Context::new(&connection_info, None); +pub(crate) fn translate_query(query: Query, connection_info: &ConnectionInfo) -> TranslateResult { + let ctx = Context::new(connection_info, None); match query { Query::Read(rq) => translate_read_query(rq, &ctx), @@ -30,8 +24,16 @@ pub(crate) fn translate_query(query: Query) -> TranslateResult { } } -fn build_db_query<'a>(query: impl Into>) -> TranslateResult { - let (sql, params) = quaint::visitor::Postgres::build(query)?; +fn build_db_query<'a>(query: impl Into>, ctx: &Context<'_>) -> TranslateResult { + let (sql, params) = match ctx.connection_info.sql_family() { + SqlFamily::Postgres => quaint::visitor::Postgres::build(query)?, + // 
TODO: implement proper switch for other databases once proper feature flags are supported/logic is extracted + _ => unimplemented!(), + // SqlFamily::Mysql => quaint::visitor::Mysql::build(query)?, + // SqlFamily::Sqlite => quaint::visitor::Sqlite::build(query)?, + // SqlFamily::Mssql => quaint::visitor::Mssql::build(query)?, + }; + let params = params .into_iter() .map(convert::quaint_value_to_prisma_value) diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index ab540d024722..0839416ce7c4 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -31,7 +31,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ) .limit(1); - let expr = Expression::Query(build_db_query(query)?); + let expr = Expression::Query(build_db_query(query, ctx)?); let expr = Expression::Unique(Box::new(expr)); if rq.nested.is_empty() { @@ -56,7 +56,7 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans ctx, ); - let expr = Expression::Query(build_db_query(query)?); + let expr = Expression::Query(build_db_query(query, ctx)?); let expr = if needs_reversed_order { Expression::Reverse(Box::new(expr)) @@ -186,7 +186,7 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> Transl let query = if to_one_relation { query.limit(1) } else { query }; - let mut expr = Expression::Query(build_db_query(query)?); + let mut expr = Expression::Query(build_db_query(query, ctx)?); if to_one_relation { expr = Expression::Unique(Box::new(expr)); diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-engine/core/src/compiler/translate/query/write.rs index 2dbbf12327c7..4e361a6f746e 100644 --- a/query-engine/core/src/compiler/translate/query/write.rs +++ b/query-engine/core/src/compiler/translate/query/write.rs @@ -19,7 +19,7 @@ pub(crate) fn 
translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> Tra // TODO: we probably need some additional node type or extra info in the WriteQuery node // to help the client executor figure out the returned ID in the case when it's inferred // from the query arguments. - Expression::Query(build_db_query(query)?) + Expression::Query(build_db_query(query, ctx)?) } WriteQuery::CreateManyRecords(cmr) => { @@ -33,7 +33,7 @@ pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> Tra ctx, ) .into_iter() - .map(build_db_query) + .map(|query| build_db_query(query, ctx)) .map(|maybe_db_query| maybe_db_query.map(Expression::Execute)) .collect::>>()?, ) @@ -41,7 +41,7 @@ pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> Tra Expression::Sum( generate_insert_statements(&cmr.model, cmr.args, cmr.skip_duplicates, None, ctx) .into_iter() - .map(build_db_query) + .map(|query| build_db_query(query, ctx)) .map(|maybe_db_query| maybe_db_query.map(Expression::Execute)) .collect::>>()?, ) diff --git a/query-engine/query-builders/sql-query-builder/src/context.rs b/query-engine/query-builders/sql-query-builder/src/context.rs index 6bb1f2a1414a..9634df12b43e 100644 --- a/query-engine/query-builders/sql-query-builder/src/context.rs +++ b/query-engine/query-builders/sql-query-builder/src/context.rs @@ -2,7 +2,7 @@ use quaint::prelude::ConnectionInfo; use telemetry::TraceParent; pub struct Context<'a> { - connection_info: &'a ConnectionInfo, + pub connection_info: &'a ConnectionInfo, pub(crate) traceparent: Option, /// Maximum rows allowed at once for an insert query. /// None is unlimited. 
diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 1d17eb56ff87..d646cafff403 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -4,7 +4,7 @@ use napi::{threadsafe_function::ThreadSafeCallContext, Env, JsFunction, JsObject use napi_derive::napi; use prisma_metrics::{MetricFormat, WithMetricsInstrumentation}; use psl::PreviewFeature; -use quaint::connector::ExternalConnector; +use quaint::connector::{ConnectionInfo, ExternalConnector}; use query_core::{protocol::EngineProtocol, relation_load_strategy, schema, TransactionOptions, TxId}; use query_engine_common::{ engine::{ @@ -368,7 +368,22 @@ impl QueryEngine { .into_doc(engine.query_schema()) .map_err(|err| napi::Error::from_reason(err.to_string()))?; - let plan = query_core::compiler::compile(engine.query_schema(), query_doc).map_err(ApiError::from)?; + let connection_info = match self.connector_mode { + ConnectorMode::Js { ref adapter } => ConnectionInfo::External( + adapter + .get_connection_info() + .await + .map_err(|err| napi::Error::from_reason(err.to_string()))?, + ), + ConnectorMode::Rust => { + return Err(napi::Error::from_reason( + "Query compiler requires JS driver adapter".to_string(), + )) + } + }; + + let plan = query_core::compiler::compile(engine.query_schema(), query_doc, &connection_info) + .map_err(ApiError::from)?; let response = if human_readable { plan.to_string() diff --git a/query-engine/query-engine-wasm/src/wasm/engine.rs b/query-engine/query-engine-wasm/src/wasm/engine.rs index a1c7cc7846e9..48b3c100e361 100644 --- a/query-engine/query-engine-wasm/src/wasm/engine.rs +++ b/query-engine/query-engine-wasm/src/wasm/engine.rs @@ -8,7 +8,7 @@ use crate::{ use driver_adapters::JsObject; use js_sys::Function as JsFunction; use psl::ConnectorRegistry; -use quaint::connector::ExternalConnector; +use quaint::connector::{ConnectionInfo, ExternalConnector}; use 
query_core::{ protocol::EngineProtocol, relation_load_strategy, @@ -375,7 +375,10 @@ impl QueryEngine { let request = RequestBody::try_from_str(&request, engine.engine_protocol())?; let query_doc = request.into_doc(engine.query_schema())?; - let plan = query_core::compiler::compile(engine.query_schema(), query_doc).map_err(ApiError::from)?; + let connection_info = ConnectionInfo::External(self.adapter.get_connection_info().await?); + + let plan = query_core::compiler::compile(engine.query_schema(), query_doc, &connection_info) + .map_err(ApiError::from)?; Ok(serde_json::to_string(&plan)?) } .with_subscriber(dispatcher) diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 4555eeb9ab1e..099ea2a80359 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use quaint::connector::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}; use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; use serde_json::json; @@ -15,6 +16,12 @@ pub fn main() -> anyhow::Result<()> { let schema = Arc::new(schema); let query_schema = Arc::new(query_core::schema::build(schema, true)); + let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( + SqlFamily::Postgres, + "public".to_owned(), + None, + )); + // prisma.user.findUnique({ // where: { // email: Prisma.Param("userEmail") @@ -66,7 +73,7 @@ pub fn main() -> anyhow::Result<()> { println!("{graph}"); - let expr = query_core::compiler::translate(graph)?; + let expr = query_core::compiler::translate(graph, &connection_info)?; println!("{}", expr.pretty_print(true, 80)?); From 5f0c2326775489474415efc340a97cac27ebf4e7 Mon Sep 17 00:00:00 2001 From: jacek-prisma Date: Thu, 16 Jan 2025 14:24:07 +0000 Subject: [PATCH 53/65] feat: generic query building (#5127) * feat: generic query building * 
fix: compiler example * fix: comment out non pg variants for now * fix: unused imports * chore: remove currently unnecessary impl * fix: enable postgres feature for now --- Cargo.lock | 4 +- Cargo.toml | 1 + .../connectors/sql-query-connector/Cargo.toml | 3 + .../src/database/operations/read.rs | 3 +- .../src/database/operations/read/process.rs | 2 +- .../src/database/operations/write.rs | 143 +----------------- .../connectors/sql-query-connector/src/lib.rs | 2 - query-engine/core/Cargo.toml | 9 +- query-engine/core/src/compiler/mod.rs | 18 ++- query-engine/core/src/compiler/translate.rs | 26 ++-- .../core/src/compiler/translate/query.rs | 33 +--- .../core/src/compiler/translate/query/read.rs | 85 +++++------ .../src/compiler/translate/query/write.rs | 42 +++-- .../query-builders/query-builder/Cargo.toml | 3 + .../query-builders/query-builder/src/lib.rs | 29 +++- .../src/query_arguments_ext.rs | 0 .../sql-query-builder/Cargo.toml | 3 +- .../sql-query-builder/src}/convert.rs | 2 +- .../sql-query-builder/src/cursor_condition.rs | 2 +- .../sql-query-builder/src/lib.rs | 81 +++++++++- .../sql-query-builder/src/ordering.rs | 3 +- .../sql-query-builder/src/write.rs | 105 +++++++++++++ query-engine/query-engine/Cargo.toml | 1 + .../query-engine/examples/compiler.rs | 11 +- .../query-structure/src/query_arguments.rs | 5 + 25 files changed, 340 insertions(+), 276 deletions(-) rename query-engine/query-builders/{sql-query-builder => query-builder}/src/query_arguments_ext.rs (100%) rename query-engine/{core/src/compiler/translate/query => query-builders/sql-query-builder/src}/convert.rs (98%) diff --git a/Cargo.lock b/Cargo.lock index ec9b4ae466b2..d156fc51229f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3800,7 +3800,6 @@ dependencies = [ "serde", "serde_json", "sql-query-builder", - "sql-query-connector", "telemetry", "thiserror", "tokio", @@ -3836,6 +3835,7 @@ dependencies = [ "serde", "serde_json", "serial_test", + "sql-query-builder", "sql-query-connector", "structopt", 
"telemetry", @@ -5133,6 +5133,7 @@ dependencies = [ name = "sql-query-builder" version = "0.1.0" dependencies = [ + "bigdecimal", "chrono", "itertools 0.12.0", "prisma-value", @@ -5160,6 +5161,7 @@ dependencies = [ "prisma-value", "psl", "quaint", + "query-builder", "query-connector", "query-structure", "rand 0.8.5", diff --git a/Cargo.toml b/Cargo.toml index 4658ac4617c9..3d3948494403 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,6 +38,7 @@ members = [ [workspace.dependencies] async-trait = { version = "0.1.77" } +bigdecimal = "0.3" enumflags2 = { version = "0.7", features = ["serde"] } futures = "0.3" psl = { path = "./psl/psl" } diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index d53013d15832..0c04ef95f340 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -62,6 +62,9 @@ path = "../query-connector" [dependencies.query-structure] path = "../../query-structure" +[dependencies.query-builder] +path = "../../query-builders/query-builder" + [dependencies.sql-query-builder] path = "../../query-builders/sql-query-builder" diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 9a9ac4469ce0..7e3c881f802c 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -8,8 +8,9 @@ use crate::{QueryExt, Queryable, SqlError}; use connector_interface::*; use futures::stream::{FuturesUnordered, StreamExt}; use quaint::ast::*; +use query_builder::QueryArgumentsExt; use query_structure::*; -use sql_query_builder::{column_metadata, read, AsColumns, AsTable, Context, QueryArgumentsExt, RelationFieldExt}; +use sql_query_builder::{column_metadata, read, AsColumns, AsTable, Context, RelationFieldExt}; 
pub(crate) async fn get_single_record( conn: &dyn Queryable, diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs index 042dc2815b6a..56332089f975 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read/process.rs @@ -1,8 +1,8 @@ use std::borrow::Cow; use itertools::{Either, Itertools}; +use query_builder::QueryArgumentsExt; use query_structure::{QueryArguments, Record}; -use sql_query_builder::QueryArgumentsExt; macro_rules! processor_state { ($name:ident $(-> $transition:ident($bound:ident))?) => { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index 07a385bab3ce..8fe2bcaac411 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -2,8 +2,7 @@ use super::update::*; use crate::row::ToSqlRow; use crate::value::to_prisma_value; use crate::{error::SqlError, QueryExt, Queryable}; -use itertools::Itertools; -use quaint::ast::{Insert, Query}; +use quaint::ast::Query; use quaint::prelude::ResultSet; use quaint::{ error::ErrorKind, @@ -12,32 +11,9 @@ use quaint::{ use query_structure::*; use sql_query_builder::{column_metadata, write, Context, FilterBuilder, SelectionResultExt, SqlTraceComment}; use std::borrow::Cow; -use std::{ - collections::{HashMap, HashSet}, - ops::Deref, -}; +use std::collections::HashMap; use user_facing_errors::query_engine::DatabaseConstraint; -#[cfg(target_arch = "wasm32")] -macro_rules! 
trace { - (target: $target:expr, $($arg:tt)+) => {{ - // No-op in WebAssembly - }}; - ($($arg:tt)+) => {{ - // No-op in WebAssembly - }}; -} - -#[cfg(not(target_arch = "wasm32"))] -macro_rules! trace { - (target: $target:expr, $($arg:tt)+) => { - tracing::log::trace!(target: $target, $($arg)+); - }; - ($($arg:tt)+) => { - tracing::log::trace!($($arg)+); - }; -} - async fn generate_id( conn: &dyn Queryable, id_field: &FieldSelection, @@ -191,49 +167,6 @@ pub(crate) async fn create_record( } } -/// Returns a set of fields that are used in the arguments for the create operation. -fn collect_affected_fields(args: &[WriteArgs], model: &Model) -> HashSet { - let mut fields = HashSet::new(); - args.iter().for_each(|arg| fields.extend(arg.keys())); - - fields - .into_iter() - .map(|dsfn| model.fields().scalar().find(|sf| sf.db_name() == dsfn.deref()).unwrap()) - .collect() -} - -/// Generates a list of insert statements to execute. If `selected_fields` is set, insert statements -/// will return the specified columns of inserted rows. -pub fn generate_insert_statements( - model: &Model, - args: Vec, - skip_duplicates: bool, - selected_fields: Option<&ModelProjection>, - ctx: &Context<'_>, -) -> Vec> { - let affected_fields = collect_affected_fields(&args, model); - - if affected_fields.is_empty() { - args.into_iter() - .map(|_| write::create_records_empty(model, skip_duplicates, selected_fields, ctx)) - .collect() - } else { - let partitioned_batches = partition_into_batches(args, ctx); - trace!("Total of {} batches to be executed.", partitioned_batches.len()); - trace!( - "Batch sizes: {:?}", - partitioned_batches.iter().map(|b| b.len()).collect_vec() - ); - - partitioned_batches - .into_iter() - .map(|batch| { - write::create_records_nonempty(model, batch, skip_duplicates, &affected_fields, selected_fields, ctx) - }) - .collect() - } -} - /// Inserts records specified as a list of `WriteArgs`. Returns number of inserted records. 
pub(crate) async fn create_records_count( conn: &dyn Queryable, @@ -242,7 +175,7 @@ pub(crate) async fn create_records_count( skip_duplicates: bool, ctx: &Context<'_>, ) -> crate::Result { - let inserts = generate_insert_statements(model, args, skip_duplicates, None, ctx); + let inserts = write::generate_insert_statements(model, args, skip_duplicates, None, ctx); let mut count = 0; for insert in inserts { count += conn.execute(insert.into()).await?; @@ -265,7 +198,7 @@ pub(crate) async fn create_records_returning( let idents = selected_fields.type_identifiers_with_arities(); let meta = column_metadata::create(&field_names, &idents); let mut records = ManyRecords::new(field_names.clone()); - let inserts = generate_insert_statements(model, args, skip_duplicates, Some(&selected_fields.into()), ctx); + let inserts = write::generate_insert_statements(model, args, skip_duplicates, Some(&selected_fields.into()), ctx); for insert in inserts { let result_set = conn.query(insert.into()).await?; @@ -281,74 +214,6 @@ pub(crate) async fn create_records_returning( Ok(records) } -/// Partitions data into batches, respecting `max_bind_values` and `max_insert_rows` settings from -/// the `Context`. -fn partition_into_batches(args: Vec, ctx: &Context<'_>) -> Vec> { - let batches = if let Some(max_params) = ctx.max_bind_values() { - // We need to split inserts if they are above a parameter threshold, as well as split based on number of rows. - // -> Horizontal partitioning by row number, vertical by number of args. - args.into_iter() - .peekable() - .batching(|iter| { - let mut param_count: usize = 0; - let mut batch = vec![]; - - while param_count < max_params { - // If the param count _including_ the next item doens't exceed the limit, - // we continue filling up the current batch. 
- let proceed = match iter.peek() { - Some(next) => (param_count + next.len()) <= max_params, - None => break, - }; - - if proceed { - match iter.next() { - Some(next) => { - param_count += next.len(); - batch.push(next) - } - None => break, - } - } else { - break; - } - } - - if batch.is_empty() { - None - } else { - Some(batch) - } - }) - .collect_vec() - } else { - vec![args] - }; - - if let Some(max_rows) = ctx.max_insert_rows() { - let capacity = batches.len(); - batches - .into_iter() - .fold(Vec::with_capacity(capacity), |mut batches, next_batch| { - if next_batch.len() > max_rows { - batches.extend( - next_batch - .into_iter() - .chunks(max_rows) - .into_iter() - .map(|chunk| chunk.into_iter().collect_vec()), - ); - } else { - batches.push(next_batch); - } - - batches - }) - } else { - batches - } -} - /// Update one record in a database defined in `conn` and the records /// defined in `args`, resulting the identifiers that were modified in the /// operation. diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index 28ec5862e227..b29085a918d0 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -11,8 +11,6 @@ mod value; use self::{query_ext::QueryExt, row::*}; use quaint::prelude::Queryable; -pub use database::operations::write::generate_insert_statements; - pub use database::FromSource; #[cfg(feature = "driver-adapters")] pub use database::Js; diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index cd41c4ccf840..a1d976e71416 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -9,7 +9,7 @@ graphql-protocol = [] [dependencies] async-trait.workspace = true -bigdecimal = "0.3" +bigdecimal.workspace = true chrono.workspace = true connection-string.workspace = true connector = { path = "../connectors/query-connector", package = "query-connector" } @@ -45,10 +45,5 
@@ lru = "0.7.7" enumflags2.workspace = true derive_more.workspace = true -# HACK: query builders need to be a separate crate, and maybe the compiler too -# HACK: we hardcode PostgreSQL as the dialect for now -sql-query-connector = { path = "../connectors/sql-query-connector", features = [ - "postgresql", -] } # HACK: this should not be in core either -quaint.workspace = true +quaint = { workspace = true, features = ["postgresql"] } diff --git a/query-engine/core/src/compiler/mod.rs b/query-engine/core/src/compiler/mod.rs index 878daa2f0e35..f1d0e8c75e37 100644 --- a/query-engine/core/src/compiler/mod.rs +++ b/query-engine/core/src/compiler/mod.rs @@ -4,8 +4,12 @@ pub mod translate; use std::sync::Arc; pub use expression::Expression; -use quaint::connector::ConnectionInfo; +use quaint::{ + prelude::{ConnectionInfo, SqlFamily}, + visitor, +}; use schema::QuerySchema; +use sql_query_builder::{Context, SqlQueryBuilder}; use thiserror::Error; pub use translate::{translate, TranslateError}; @@ -29,6 +33,16 @@ pub fn compile( return Err(CompileError::UnsupportedRequest.into()); }; + let ctx = Context::new(connection_info, None); let (graph, _serializer) = QueryGraphBuilder::new(query_schema).build(query)?; - Ok(translate(graph, connection_info).map_err(CompileError::from)?) + let res = match connection_info.sql_family() { + SqlFamily::Postgres => translate(graph, &SqlQueryBuilder::>::new(ctx)), + // feature flags are disabled for now + // SqlFamily::Mysql => translate(graph, &SqlQueryBuilder::>::new(ctx)), + // SqlFamily::Sqlite => translate(graph, &SqlQueryBuilder::>::new(ctx)), + // SqlFamily::Mssql => translate(graph, &SqlQueryBuilder::>::new(ctx)), + _ => unimplemented!(), + }; + + Ok(res.map_err(CompileError::TranslateError)?) 
} diff --git a/query-engine/core/src/compiler/translate.rs b/query-engine/core/src/compiler/translate.rs index 81742b50c304..a5fa6b30f0f1 100644 --- a/query-engine/core/src/compiler/translate.rs +++ b/query-engine/core/src/compiler/translate.rs @@ -1,8 +1,8 @@ mod query; use crate::{EdgeRef, Node, NodeRef, Query, QueryGraph}; -use quaint::connector::ConnectionInfo; use query::translate_query; +use query_builder::QueryBuilder; use thiserror::Error; use super::expression::{Binding, Expression}; @@ -12,41 +12,41 @@ pub enum TranslateError { #[error("node {0} has no content")] NodeContentEmpty(String), - #[error("{0}")] - QuaintError(#[from] quaint::error::Error), + #[error("query builder error: {0}")] + QueryBuildFailure(#[source] Box), } pub type TranslateResult = Result; -pub fn translate(mut graph: QueryGraph, connection_info: &ConnectionInfo) -> TranslateResult { +pub fn translate(mut graph: QueryGraph, builder: &dyn QueryBuilder) -> TranslateResult { graph .root_nodes() .into_iter() - .map(|node| NodeTranslator::new(&mut graph, node, &[], connection_info).translate()) + .map(|node| NodeTranslator::new(&mut graph, node, &[], builder).translate()) .collect::>>() .map(Expression::Seq) } -struct NodeTranslator<'a, 'b, 'c> { +struct NodeTranslator<'a, 'b> { graph: &'a mut QueryGraph, node: NodeRef, #[allow(dead_code)] parent_edges: &'b [EdgeRef], - connection_info: &'c ConnectionInfo, + query_builder: &'b dyn QueryBuilder, } -impl<'a, 'b, 'c> NodeTranslator<'a, 'b, 'c> { +impl<'a, 'b> NodeTranslator<'a, 'b> { fn new( graph: &'a mut QueryGraph, node: NodeRef, parent_edges: &'b [EdgeRef], - connection_info: &'c ConnectionInfo, + query_builder: &'b dyn QueryBuilder, ) -> Self { Self { graph, node, parent_edges, - connection_info, + query_builder, } } @@ -71,7 +71,7 @@ impl<'a, 'b, 'c> NodeTranslator<'a, 'b, 'c> { .try_into() .expect("current node must be query"); - translate_query(query, self.connection_info) + translate_query(query, self.query_builder) } 
#[allow(dead_code)] @@ -106,7 +106,7 @@ impl<'a, 'b, 'c> NodeTranslator<'a, 'b, 'c> { .into_iter() .map(|(_, node)| { let edges = self.graph.incoming_edges(&node); - NodeTranslator::new(self.graph, node, &edges, self.connection_info).translate() + NodeTranslator::new(self.graph, node, &edges, self.query_builder).translate() }) .collect::, _>>()?; @@ -128,7 +128,7 @@ impl<'a, 'b, 'c> NodeTranslator<'a, 'b, 'c> { .map(|(_, node)| { let name = node.id(); let edges = self.graph.incoming_edges(&node); - let expr = NodeTranslator::new(self.graph, node, &edges, self.connection_info).translate()?; + let expr = NodeTranslator::new(self.graph, node, &edges, self.query_builder).translate()?; Ok(Binding { name, expr }) }) .collect::>>()?; diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-engine/core/src/compiler/translate/query.rs index 23c9e7321972..1fe1acf243ad 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-engine/core/src/compiler/translate/query.rs @@ -1,42 +1,17 @@ -mod convert; mod read; mod write; -use quaint::{ - prelude::{ConnectionInfo, SqlFamily}, - visitor::Visitor, -}; -use query_builder::DbQuery; +use query_builder::QueryBuilder; use read::translate_read_query; -use sql_query_builder::Context; use write::translate_write_query; use crate::{compiler::expression::Expression, Query}; use super::TranslateResult; -pub(crate) fn translate_query(query: Query, connection_info: &ConnectionInfo) -> TranslateResult { - let ctx = Context::new(connection_info, None); - +pub(crate) fn translate_query(query: Query, builder: &dyn QueryBuilder) -> TranslateResult { match query { - Query::Read(rq) => translate_read_query(rq, &ctx), - Query::Write(wq) => translate_write_query(wq, &ctx), + Query::Read(rq) => translate_read_query(rq, builder), + Query::Write(wq) => translate_write_query(wq, builder), } } - -fn build_db_query<'a>(query: impl Into>, ctx: &Context<'_>) -> TranslateResult { - let (sql, params) = match 
ctx.connection_info.sql_family() { - SqlFamily::Postgres => quaint::visitor::Postgres::build(query)?, - // TODO: implement proper switch for other databases once proper feature flags are supported/logic is extracted - _ => unimplemented!(), - // SqlFamily::Mysql => quaint::visitor::Mysql::build(query)?, - // SqlFamily::Sqlite => quaint::visitor::Sqlite::build(query)?, - // SqlFamily::Mssql => quaint::visitor::Mssql::build(query)?, - }; - - let params = params - .into_iter() - .map(convert::quaint_value_to_prisma_value) - .collect::>(); - Ok(DbQuery::new(sql, params)) -} diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-engine/core/src/compiler/translate/query/read.rs index 0839416ce7c4..f736331ec8ac 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-engine/core/src/compiler/translate/query/read.rs @@ -4,40 +4,37 @@ use crate::{ compiler::{ expression::{Binding, Expression, JoinExpression}, translate::TranslateResult, + TranslateError, }, FilteredQuery, ReadQuery, RelatedRecordsQuery, }; use itertools::Itertools; +use query_builder::{QueryArgumentsExt, QueryBuilder}; use query_structure::{ - ConditionValue, Filter, ModelProjection, PrismaValue, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, + ConditionValue, Filter, PrismaValue, QueryArguments, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, }; -use sql_query_builder::{read, AsColumns, Context, QueryArgumentsExt}; -use super::build_db_query; - -pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> TranslateResult { +pub(crate) fn translate_read_query(query: ReadQuery, builder: &dyn QueryBuilder) -> TranslateResult { Ok(match query { ReadQuery::RecordQuery(rq) => { let selected_fields = rq.selected_fields.without_relations().into_virtuals_last(); - let query = read::get_records( - &rq.model, - ModelProjection::from(&selected_fields) - .as_columns(ctx) - .mark_all_selected(), - selected_fields.virtuals(), + let args = 
QueryArguments::from(( + rq.model.clone(), rq.filter.expect("ReadOne query should always have filter set"), - ctx, - ) - .limit(1); + )) + .with_take(Some(1)); + let query = builder + .build_get_records(&rq.model, args, &selected_fields) + .map_err(TranslateError::QueryBuildFailure)?; - let expr = Expression::Query(build_db_query(query, ctx)?); + let expr = Expression::Query(query); let expr = Expression::Unique(Box::new(expr)); if rq.nested.is_empty() { expr } else { - add_inmemory_join(expr, rq.nested, ctx)? + add_inmemory_join(expr, rq.nested, builder)? } } @@ -46,17 +43,11 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans let needs_reversed_order = mrq.args.needs_reversed_order(); // TODO: we ignore chunking for now - let query = read::get_records( - &mrq.model, - ModelProjection::from(&selected_fields) - .as_columns(ctx) - .mark_all_selected(), - selected_fields.virtuals(), - mrq.args, - ctx, - ); - - let expr = Expression::Query(build_db_query(query, ctx)?); + let query = builder + .build_get_records(&mrq.model, mrq.args, &selected_fields) + .map_err(TranslateError::QueryBuildFailure)?; + + let expr = Expression::Query(query); let expr = if needs_reversed_order { Expression::Reverse(Box::new(expr)) @@ -67,15 +58,15 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans if mrq.nested.is_empty() { expr } else { - add_inmemory_join(expr, mrq.nested, ctx)? + add_inmemory_join(expr, mrq.nested, builder)? } } ReadQuery::RelatedRecordsQuery(rrq) => { if rrq.parent_field.relation().is_many_to_many() { - build_read_m2m_query(rrq, ctx)? + build_read_m2m_query(rrq, builder)? } else { - build_read_one2m_query(rrq, ctx)? + build_read_one2m_query(rrq, builder)? 
} } @@ -83,7 +74,11 @@ pub(crate) fn translate_read_query(query: ReadQuery, ctx: &Context<'_>) -> Trans }) } -fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<'_>) -> TranslateResult { +fn add_inmemory_join( + parent: Expression, + nested: Vec, + builder: &dyn QueryBuilder, +) -> TranslateResult { let all_linking_fields = nested .iter() .flat_map(|nested| match nested { @@ -139,7 +134,7 @@ fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<' })); } - let child_query = translate_read_query(ReadQuery::RelatedRecordsQuery(rrq), ctx)?; + let child_query = translate_read_query(ReadQuery::RelatedRecordsQuery(rrq), builder)?; Ok(JoinExpression { child: child_query, @@ -164,29 +159,27 @@ fn add_inmemory_join(parent: Expression, nested: Vec, ctx: &Context<' }) } -fn build_read_m2m_query(_query: RelatedRecordsQuery, _ctx: &Context<'_>) -> TranslateResult { +fn build_read_m2m_query(_query: RelatedRecordsQuery, _builder: &dyn QueryBuilder) -> TranslateResult { todo!() } -fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> TranslateResult { +fn build_read_one2m_query(rrq: RelatedRecordsQuery, builder: &dyn QueryBuilder) -> TranslateResult { let selected_fields = rrq.selected_fields.without_relations().into_virtuals_last(); let needs_reversed_order = rrq.args.needs_reversed_order(); let to_one_relation = !rrq.parent_field.arity().is_list(); // TODO: we ignore chunking for now - let query = read::get_records( - &rrq.parent_field.related_model(), - ModelProjection::from(&selected_fields) - .as_columns(ctx) - .mark_all_selected(), - selected_fields.virtuals(), - rrq.args, - ctx, - ); - let query = if to_one_relation { query.limit(1) } else { query }; + let args = if to_one_relation { + rrq.args.with_take(Some(1)) + } else { + rrq.args + }; + let query = builder + .build_get_records(&rrq.parent_field.related_model(), args, &selected_fields) + .map_err(TranslateError::QueryBuildFailure)?; - let mut expr = 
Expression::Query(build_db_query(query, ctx)?); + let mut expr = Expression::Query(query); if to_one_relation { expr = Expression::Unique(Box::new(expr)); @@ -199,6 +192,6 @@ fn build_read_one2m_query(rrq: RelatedRecordsQuery, ctx: &Context<'_>) -> Transl if rrq.nested.is_empty() { Ok(expr) } else { - add_inmemory_join(expr, rrq.nested, ctx) + add_inmemory_join(expr, rrq.nested, builder) } } diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-engine/core/src/compiler/translate/query/write.rs index 4e361a6f746e..286fd11e9147 100644 --- a/query-engine/core/src/compiler/translate/query/write.rs +++ b/query-engine/core/src/compiler/translate/query/write.rs @@ -1,49 +1,43 @@ -use query_structure::ModelProjection; -use sql_query_builder::{write, Context}; -use sql_query_connector::generate_insert_statements; +use query_builder::QueryBuilder; use crate::{ - compiler::{expression::Expression, translate::TranslateResult}, + compiler::{expression::Expression, translate::TranslateResult, TranslateError}, WriteQuery, }; -use super::build_db_query; - -pub(crate) fn translate_write_query(query: WriteQuery, ctx: &Context<'_>) -> TranslateResult { +pub(crate) fn translate_write_query(query: WriteQuery, builder: &dyn QueryBuilder) -> TranslateResult { Ok(match query { WriteQuery::CreateRecord(cr) => { // TODO: MySQL needs additional logic to generate IDs on our side. // See sql_query_connector::database::operations::write::create_record - let query = write::create_record(&cr.model, cr.args, &ModelProjection::from(&cr.selected_fields), ctx); + let query = builder + .build_create_record(&cr.model, cr.args, &cr.selected_fields) + .map_err(TranslateError::QueryBuildFailure)?; // TODO: we probably need some additional node type or extra info in the WriteQuery node // to help the client executor figure out the returned ID in the case when it's inferred // from the query arguments. - Expression::Query(build_db_query(query, ctx)?) 
+ Expression::Query(query) } WriteQuery::CreateManyRecords(cmr) => { if let Some(selected_fields) = cmr.selected_fields { Expression::Concat( - generate_insert_statements( - &cmr.model, - cmr.args, - cmr.skip_duplicates, - Some(&selected_fields.fields.into()), - ctx, - ) - .into_iter() - .map(|query| build_db_query(query, ctx)) - .map(|maybe_db_query| maybe_db_query.map(Expression::Execute)) - .collect::>>()?, + builder + .build_inserts(&cmr.model, cmr.args, cmr.skip_duplicates, Some(&selected_fields.fields)) + .map_err(TranslateError::QueryBuildFailure)? + .into_iter() + .map(Expression::Execute) + .collect::>(), ) } else { Expression::Sum( - generate_insert_statements(&cmr.model, cmr.args, cmr.skip_duplicates, None, ctx) + builder + .build_inserts(&cmr.model, cmr.args, cmr.skip_duplicates, None) + .map_err(TranslateError::QueryBuildFailure)? .into_iter() - .map(|query| build_db_query(query, ctx)) - .map(|maybe_db_query| maybe_db_query.map(Expression::Execute)) - .collect::>>()?, + .map(Expression::Execute) + .collect::>(), ) } } diff --git a/query-engine/query-builders/query-builder/Cargo.toml b/query-engine/query-builders/query-builder/Cargo.toml index 4c35b489e828..3bc481aee186 100644 --- a/query-engine/query-builders/query-builder/Cargo.toml +++ b/query-engine/query-builders/query-builder/Cargo.toml @@ -7,3 +7,6 @@ version = "0.1.0" serde.workspace = true query-structure = { path = "../../query-structure" } + +[features] +relation_joins = [] diff --git a/query-engine/query-builders/query-builder/src/lib.rs b/query-engine/query-builders/query-builder/src/lib.rs index 240ca848580c..682ec5e21c2b 100644 --- a/query-engine/query-builders/query-builder/src/lib.rs +++ b/query-engine/query-builders/query-builder/src/lib.rs @@ -1,5 +1,32 @@ -use query_structure::PrismaValue; +use query_structure::{FieldSelection, Model, PrismaValue, QueryArguments, WriteArgs}; use serde::Serialize; +mod query_arguments_ext; + +pub use query_arguments_ext::QueryArgumentsExt; + +pub 
trait QueryBuilder { + fn build_get_records( + &self, + model: &Model, + query_arguments: QueryArguments, + selected_fields: &FieldSelection, + ) -> Result>; + + fn build_create_record( + &self, + model: &Model, + args: WriteArgs, + selected_fields: &FieldSelection, + ) -> Result>; + + fn build_inserts( + &self, + model: &Model, + args: Vec, + skip_duplicates: bool, + selected_fields: Option<&FieldSelection>, + ) -> Result, Box>; +} #[derive(Debug, Serialize)] pub struct DbQuery { diff --git a/query-engine/query-builders/sql-query-builder/src/query_arguments_ext.rs b/query-engine/query-builders/query-builder/src/query_arguments_ext.rs similarity index 100% rename from query-engine/query-builders/sql-query-builder/src/query_arguments_ext.rs rename to query-engine/query-builders/query-builder/src/query_arguments_ext.rs diff --git a/query-engine/query-builders/sql-query-builder/Cargo.toml b/query-engine/query-builders/sql-query-builder/Cargo.toml index 80cccff5f961..c24274d2749e 100644 --- a/query-engine/query-builders/sql-query-builder/Cargo.toml +++ b/query-engine/query-builders/sql-query-builder/Cargo.toml @@ -13,7 +13,8 @@ psl = { path = "../../../psl/psl" } itertools.workspace = true chrono.workspace = true +bigdecimal.workspace = true serde_json.workspace = true [features] -relation_joins = [] +relation_joins = ["query-builder/relation_joins"] diff --git a/query-engine/core/src/compiler/translate/query/convert.rs b/query-engine/query-builders/sql-query-builder/src/convert.rs similarity index 98% rename from query-engine/core/src/compiler/translate/query/convert.rs rename to query-engine/query-builders/sql-query-builder/src/convert.rs index 2ea8463f93c0..e4a4c0cbc3d6 100644 --- a/query-engine/core/src/compiler/translate/query/convert.rs +++ b/query-engine/query-builders/sql-query-builder/src/convert.rs @@ -1,7 +1,7 @@ use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; +use prisma_value::{PlaceholderType, PrismaValue}; use 
quaint::ast::VarType; -use query_structure::{PlaceholderType, PrismaValue}; pub(crate) fn quaint_value_to_prisma_value(value: quaint::Value<'_>) -> PrismaValue { match value.typed { diff --git a/query-engine/query-builders/sql-query-builder/src/cursor_condition.rs b/query-engine/query-builders/sql-query-builder/src/cursor_condition.rs index 1f91eff8a299..542885bcd8fc 100644 --- a/query-engine/query-builders/sql-query-builder/src/cursor_condition.rs +++ b/query-engine/query-builders/sql-query-builder/src/cursor_condition.rs @@ -2,11 +2,11 @@ use crate::{ join_utils::AliasedJoin, model_extensions::{AsColumn, AsColumns, AsTable, SelectionResultExt}, ordering::OrderByDefinition, - query_arguments_ext::QueryArgumentsExt, Context, }; use itertools::Itertools; use quaint::ast::*; +use query_builder::QueryArgumentsExt; use query_structure::*; #[derive(Debug)] diff --git a/query-engine/query-builders/sql-query-builder/src/lib.rs b/query-engine/query-builders/sql-query-builder/src/lib.rs index dafabf1f3772..f4f8cee9749b 100644 --- a/query-engine/query-builders/sql-query-builder/src/lib.rs +++ b/query-engine/query-builders/sql-query-builder/src/lib.rs @@ -1,5 +1,6 @@ pub mod column_metadata; mod context; +mod convert; mod cursor_condition; mod filter; mod join_utils; @@ -7,25 +8,97 @@ pub mod limit; mod model_extensions; mod nested_aggregations; mod ordering; -mod query_arguments_ext; pub mod read; #[cfg(feature = "relation_joins")] pub mod select; mod sql_trace; pub mod write; -use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values}; -use query_structure::SelectionResult; +use std::marker::PhantomData; + +use quaint::{ + ast::{Column, Comparable, ConditionTree, Query, Row, Values}, + visitor::Visitor, +}; +use query_builder::{DbQuery, QueryBuilder}; +use query_structure::{FieldSelection, Model, ModelProjection, QueryArguments, SelectionResult, WriteArgs}; pub use column_metadata::ColumnMetadata; pub use context::Context; pub use filter::FilterBuilder; pub 
use model_extensions::{AsColumn, AsColumns, AsTable, RelationFieldExt, SelectionResultExt}; -pub use query_arguments_ext::QueryArgumentsExt; pub use sql_trace::SqlTraceComment; const PARAMETER_LIMIT: usize = 2000; +pub struct SqlQueryBuilder<'a, Visitor> { + context: Context<'a>, + phantom: PhantomData, +} + +impl<'a, V> SqlQueryBuilder<'a, V> { + pub fn new(context: Context<'a>) -> Self { + Self { + context, + phantom: PhantomData, + } + } + + fn convert_query(&self, query: impl Into>) -> Result> + where + V: Visitor<'a>, + { + let (sql, params) = V::build(query)?; + let params = params + .into_iter() + .map(convert::quaint_value_to_prisma_value) + .collect::>(); + Ok(DbQuery::new(sql, params)) + } +} + +impl<'a, V: Visitor<'a>> QueryBuilder for SqlQueryBuilder<'a, V> { + fn build_get_records( + &self, + model: &Model, + query_arguments: QueryArguments, + selected_fields: &FieldSelection, + ) -> Result> { + let query = read::get_records( + model, + ModelProjection::from(selected_fields) + .as_columns(&self.context) + .mark_all_selected(), + selected_fields.virtuals(), + query_arguments, + &self.context, + ); + self.convert_query(query) + } + + fn build_create_record( + &self, + model: &Model, + args: WriteArgs, + selected_fields: &FieldSelection, + ) -> Result> { + let query = write::create_record(model, args, &selected_fields.into(), &self.context); + self.convert_query(query) + } + + fn build_inserts( + &self, + model: &Model, + args: Vec, + skip_duplicates: bool, + selected_fields: Option<&FieldSelection>, + ) -> Result, Box> { + let projection = selected_fields.map(ModelProjection::from); + let query = write::generate_insert_statements(model, args, skip_duplicates, projection.as_ref(), &self.context); + query.into_iter().map(|q| self.convert_query(q)).collect() + } +} + pub fn chunked_conditions( columns: &[Column<'static>], records: &[&SelectionResult], diff --git a/query-engine/query-builders/sql-query-builder/src/ordering.rs 
b/query-engine/query-builders/sql-query-builder/src/ordering.rs index 3906a3ca0aa9..dfddd19a8ec5 100644 --- a/query-engine/query-builders/sql-query-builder/src/ordering.rs +++ b/query-engine/query-builders/sql-query-builder/src/ordering.rs @@ -1,7 +1,8 @@ -use crate::{join_utils::*, model_extensions::*, query_arguments_ext::QueryArgumentsExt, Context}; +use crate::{join_utils::*, model_extensions::*, Context}; use itertools::Itertools; use psl::{datamodel_connector::ConnectorCapability, reachable_only_with_capability}; use quaint::ast::*; +use query_builder::QueryArgumentsExt; use query_structure::*; static ORDER_JOIN_PREFIX: &str = "orderby_"; diff --git a/query-engine/query-builders/sql-query-builder/src/write.rs b/query-engine/query-builders/sql-query-builder/src/write.rs index 1059cb6069f8..d9307b01e569 100644 --- a/query-engine/query-builders/sql-query-builder/src/write.rs +++ b/query-engine/query-builders/sql-query-builder/src/write.rs @@ -1,5 +1,6 @@ use crate::limit::wrap_with_limit_subquery_if_needed; use crate::{model_extensions::*, sql_trace::SqlTraceComment, Context}; +use itertools::Itertools; use quaint::ast::*; use query_structure::*; use std::{collections::HashSet, convert::TryInto}; @@ -300,3 +301,107 @@ pub fn delete_relation_table_records( .so_that(parent_id_criteria.and(child_id_criteria)) .add_traceparent(ctx.traceparent) } + +/// Generates a list of insert statements to execute. If `selected_fields` is set, insert statements +/// will return the specified columns of inserted rows. 
+pub fn generate_insert_statements( + model: &Model, + args: Vec, + skip_duplicates: bool, + selected_fields: Option<&ModelProjection>, + ctx: &Context<'_>, +) -> Vec> { + let affected_fields = collect_affected_fields(&args, model); + + if affected_fields.is_empty() { + args.into_iter() + .map(|_| create_records_empty(model, skip_duplicates, selected_fields, ctx)) + .collect() + } else { + let partitioned_batches = partition_into_batches(args, ctx); + + partitioned_batches + .into_iter() + .map(|batch| create_records_nonempty(model, batch, skip_duplicates, &affected_fields, selected_fields, ctx)) + .collect() + } +} + +/// Returns a set of fields that are used in the arguments for the create operation. +fn collect_affected_fields(args: &[WriteArgs], model: &Model) -> HashSet { + let mut fields = HashSet::new(); + args.iter().for_each(|arg| fields.extend(arg.keys())); + + fields + .into_iter() + .map(|dsfn| model.fields().scalar().find(|sf| sf.db_name() == &**dsfn).unwrap()) + .collect() +} + +/// Partitions data into batches, respecting `max_bind_values` and `max_insert_rows` settings from +/// the `Context`. +fn partition_into_batches(args: Vec, ctx: &Context<'_>) -> Vec> { + let batches = if let Some(max_params) = ctx.max_bind_values() { + // We need to split inserts if they are above a parameter threshold, as well as split based on number of rows. + // -> Horizontal partitioning by row number, vertical by number of args. + args.into_iter() + .peekable() + .batching(|iter| { + let mut param_count: usize = 0; + let mut batch = vec![]; + + while param_count < max_params { + // If the param count _including_ the next item doens't exceed the limit, + // we continue filling up the current batch. 
+ let proceed = match iter.peek() { + Some(next) => (param_count + next.len()) <= max_params, + None => break, + }; + + if proceed { + match iter.next() { + Some(next) => { + param_count += next.len(); + batch.push(next) + } + None => break, + } + } else { + break; + } + } + + if batch.is_empty() { + None + } else { + Some(batch) + } + }) + .collect_vec() + } else { + vec![args] + }; + + if let Some(max_rows) = ctx.max_insert_rows() { + let capacity = batches.len(); + batches + .into_iter() + .fold(Vec::with_capacity(capacity), |mut batches, next_batch| { + if next_batch.len() > max_rows { + batches.extend( + next_batch + .into_iter() + .chunks(max_rows) + .into_iter() + .map(|chunk| chunk.into_iter().collect_vec()), + ); + } else { + batches.push(next_batch); + } + + batches + }) + } else { + batches + } +} diff --git a/query-engine/query-engine/Cargo.toml b/query-engine/query-engine/Cargo.toml index db011f9238d7..5d51d16237c6 100644 --- a/query-engine/query-engine/Cargo.toml +++ b/query-engine/query-engine/Cargo.toml @@ -39,6 +39,7 @@ serial_test = "*" quaint.workspace = true indoc.workspace = true indexmap.workspace = true +sql-query-builder = { path = "../query-builders/sql-query-builder" } [build-dependencies] build-utils.path = "../../libs/build-utils" diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs index 099ea2a80359..e72a1acdf987 100644 --- a/query-engine/query-engine/examples/compiler.rs +++ b/query-engine/query-engine/examples/compiler.rs @@ -1,9 +1,13 @@ use std::sync::Arc; -use quaint::connector::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}; +use quaint::{ + prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, + visitor::Postgres, +}; use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; use serde_json::json; +use sql_query_builder::{Context, SqlQueryBuilder}; pub fn main() -> anyhow::Result<()> 
{ let schema_string = include_str!("./schema.prisma"); @@ -73,7 +77,10 @@ pub fn main() -> anyhow::Result<()> { println!("{graph}"); - let expr = query_core::compiler::translate(graph, &connection_info)?; + let ctx = Context::new(&connection_info, None); + let builder = SqlQueryBuilder::>::new(ctx); + + let expr = query_core::compiler::translate(graph, &builder)?; println!("{}", expr.pretty_print(true, 80)?); diff --git a/query-engine/query-structure/src/query_arguments.rs b/query-engine/query-structure/src/query_arguments.rs index 6739f3eeb0ae..6afc7b9e41c4 100644 --- a/query-engine/query-structure/src/query_arguments.rs +++ b/query-engine/query-structure/src/query_arguments.rs @@ -89,6 +89,11 @@ impl QueryArguments { } } + pub fn with_take(mut self, take: Option) -> Self { + self.take = take; + self + } + pub fn do_nothing(&self) -> bool { self.cursor.is_none() && self.take.is_none() From 48e2fc1c467ee04d3c1aa63e7244987d04c3046b Mon Sep 17 00:00:00 2001 From: jacek-prisma Date: Fri, 17 Jan 2025 14:34:53 +0000 Subject: [PATCH 54/65] chore: extract query-compiler and query-compiler-wasm (#5129) * feat: generic query building * fix: compiler example * fix: comment out non pg variants for now * fix: unused imports * chore: remove currently unnecessary impl * chore: extract query compiler crate * feat: add query-compiler-wasm * chore: cleanup * chore: get rid of adapter object, pass connection info and add CI * ci: update formatting job and pedantic check * build: remove driver-adapters dependency from compiler-wasm for now and remove cfg target wasm * chore: address comments * ci: remove unneeded RUSTFLAGS * chore: remove accidentally added file --- .github/workflows/formatting.yml | 24 +--- .../workflows/publish-query-compiler-wasm.yml | 65 +++++++++ Cargo.lock | 46 +++++- Cargo.toml | 4 + Makefile | 22 ++- query-compiler/query-compiler-wasm/.gitignore | 7 + query-compiler/query-compiler-wasm/Cargo.toml | 39 ++++++ query-compiler/query-compiler-wasm/build.rs | 3 + 
query-compiler/query-compiler-wasm/build.sh | 132 ++++++++++++++++++ .../query-compiler-wasm/package.json | 5 + .../query-compiler-wasm/src/compiler.rs | 94 +++++++++++++ query-compiler/query-compiler-wasm/src/lib.rs | 2 + .../query-compiler-wasm/src/params.rs | 68 +++++++++ query-compiler/query-compiler/Cargo.toml | 24 ++++ .../query-compiler/src}/expression.rs | 0 .../query-compiler/src}/expression/format.rs | 0 .../query-compiler/src/lib.rs | 27 ++-- .../query-compiler/src}/translate.rs | 2 +- .../query-compiler/src}/translate/query.rs | 3 +- .../src}/translate/query/read.rs | 10 +- .../src}/translate/query/write.rs | 6 +- query-engine/core/Cargo.toml | 6 +- query-engine/core/src/error.rs | 5 +- query-engine/core/src/lib.rs | 7 +- query-engine/core/src/query_ast/mod.rs | 2 +- query-engine/core/src/query_ast/read.rs | 4 +- query-engine/core/src/query_ast/write.rs | 2 +- query-engine/core/src/query_graph/mod.rs | 8 +- .../driver-adapters/executor/package.json | 6 +- .../driver-adapters/executor/src/demo-qc.ts | 114 +++++++++++++++ .../executor/src/query-compiler-wasm.ts | 45 ++++++ .../executor/src/query-compiler.ts | 25 ++++ query-engine/driver-adapters/src/lib.rs | 1 + query-engine/driver-adapters/src/types.rs | 5 +- query-engine/query-engine-node-api/Cargo.toml | 1 + .../query-engine-node-api/src/engine.rs | 6 +- .../query-engine-wasm/src/wasm/engine.rs | 27 +--- .../query-engine/examples/compiler.rs | 88 ------------ .../query-engine/examples/schema.prisma | 33 ----- query-engine/request-handlers/Cargo.toml | 2 +- .../request-handlers/src/load_executor.rs | 4 +- 41 files changed, 745 insertions(+), 229 deletions(-) create mode 100644 .github/workflows/publish-query-compiler-wasm.yml create mode 100644 query-compiler/query-compiler-wasm/.gitignore create mode 100644 query-compiler/query-compiler-wasm/Cargo.toml create mode 100644 query-compiler/query-compiler-wasm/build.rs create mode 100755 query-compiler/query-compiler-wasm/build.sh create mode 100644 
query-compiler/query-compiler-wasm/package.json create mode 100644 query-compiler/query-compiler-wasm/src/compiler.rs create mode 100644 query-compiler/query-compiler-wasm/src/lib.rs create mode 100644 query-compiler/query-compiler-wasm/src/params.rs create mode 100644 query-compiler/query-compiler/Cargo.toml rename {query-engine/core/src/compiler => query-compiler/query-compiler/src}/expression.rs (100%) rename {query-engine/core/src/compiler => query-compiler/query-compiler/src}/expression/format.rs (100%) rename query-engine/core/src/compiler/mod.rs => query-compiler/query-compiler/src/lib.rs (51%) rename {query-engine/core/src/compiler => query-compiler/query-compiler/src}/translate.rs (98%) rename {query-engine/core/src/compiler => query-compiler/query-compiler/src}/translate/query.rs (87%) rename {query-engine/core/src/compiler => query-compiler/query-compiler/src}/translate/query/read.rs (97%) rename {query-engine/core/src/compiler => query-compiler/query-compiler/src}/translate/query/write.rs (93%) create mode 100644 query-engine/driver-adapters/executor/src/demo-qc.ts create mode 100644 query-engine/driver-adapters/executor/src/query-compiler-wasm.ts create mode 100644 query-engine/driver-adapters/executor/src/query-compiler.ts delete mode 100644 query-engine/query-engine/examples/compiler.rs delete mode 100644 query-engine/query-engine/examples/schema.prisma diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index c71aff0d9fbb..de92dc469c4f 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -18,34 +18,16 @@ concurrency: cancel-in-progress: true jobs: - clippy: - name: clippy linting + checks: + name: run lints and formatting checks runs-on: ubuntu-latest - env: - RUSTFLAGS: "-Dwarnings" steps: - uses: actions/checkout@v4 - uses: actions-rust-lang/setup-rust-toolchain@v1 with: components: clippy targets: wasm32-unknown-unknown - # Check the whole workspace with clippy for the native 
compilation - # target, and query-engine-wasm and dependencies for wasm32-unknown-unknown. - # Note that `--all-targets` is unrelated to `--target` as in target platform, - # it is a shortcut for `--lib --bins --tests --benches --examples`. - - run: | - cargo clippy --workspace --all-features --all-targets - cargo clippy --all-features --all-targets -p query-engine-wasm -p prisma-schema-build --target wasm32-unknown-unknown - - rustfmt: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions-rust-lang/setup-rust-toolchain@v1 - with: - components: rustfmt - - name: Check formatting - run: cargo fmt -- --check + - run: make pedantic shellcheck: runs-on: ubuntu-latest diff --git a/.github/workflows/publish-query-compiler-wasm.yml b/.github/workflows/publish-query-compiler-wasm.yml new file mode 100644 index 000000000000..52030b74c71a --- /dev/null +++ b/.github/workflows/publish-query-compiler-wasm.yml @@ -0,0 +1,65 @@ +name: Build and publish @prisma/query-compiler-wasm +run-name: npm - release @prisma/query-compiler-wasm@${{ github.event.inputs.packageVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} + +concurrency: publish-query-compiler-wasm + +on: + # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo + workflow_dispatch: + inputs: + packageVersion: + required: true + description: "New @prisma/query-compiler-wasm package version" + enginesHash: + required: true + description: "query-compiler commit to build" + npmDistTag: + required: true + default: "latest" + description: "npm dist-tag (e.g. 
latest or integration)" + +jobs: + build: + name: Build and publish @prisma/query-compiler-wasm + runs-on: ubuntu-latest + steps: + - name: Print input + run: echo "${{ toJson(github.event.inputs) }}" + + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.enginesHash }} + + - uses: ./.github/workflows/include/rust-wasm-setup + + - name: Build @prisma/query-compiler-wasm + run: make build-qc-wasm + env: + QE_WASM_VERSION: ${{ github.event.inputs.packageVersion }} + + - name: Install Node.js + uses: actions/setup-node@v4 + with: + node-version: "20.x" + + - name: Set up NPM token for publishing + run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc + + - name: Publish @prisma/query-compiler-wasm + run: npm publish --access public --tag ${{ github.event.inputs.npmDistTag }} + working-directory: query-compiler/query-compiler-wasm/pkg + + # + # Failure handlers + # + - name: Set current job url in SLACK_FOOTER env var + if: ${{ failure() }} + run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV + - name: Slack Notification on Failure + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2.3.2 + env: + SLACK_TITLE: "Building and publishing @prisma/query-compiler-wasm failed :x:" + SLACK_COLOR: "#FF0000" + SLACK_CHANNEL: feed-prisma-query-compiler-wasm-publish-failures + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/Cargo.lock b/Cargo.lock index d156fc51229f..45abb86496ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3367,7 +3367,7 @@ checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" [[package]] name = "postgres-native-tls" version = "0.5.0" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#c62b9928d402685e152161907e8480603c29ef65" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#0db3436fc7d5c1848df5728407f342228a91f54d" dependencies 
= [ "native-tls", "tokio", @@ -3378,7 +3378,7 @@ dependencies = [ [[package]] name = "postgres-protocol" version = "0.6.7" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#c62b9928d402685e152161907e8480603c29ef65" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#0db3436fc7d5c1848df5728407f342228a91f54d" dependencies = [ "base64 0.22.1", "byteorder", @@ -3395,7 +3395,7 @@ dependencies = [ [[package]] name = "postgres-types" version = "0.2.8" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#c62b9928d402685e152161907e8480603c29ef65" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#0db3436fc7d5c1848df5728407f342228a91f54d" dependencies = [ "bit-vec", "bytes", @@ -3750,6 +3750,41 @@ dependencies = [ "serde", ] +[[package]] +name = "query-compiler" +version = "0.1.0" +dependencies = [ + "itertools 0.12.0", + "pretty", + "quaint", + "query-builder", + "query-core", + "query-structure", + "serde", + "sql-query-builder", + "thiserror", +] + +[[package]] +name = "query-compiler-wasm" +version = "0.1.0" +dependencies = [ + "build-utils", + "js-sys", + "psl", + "quaint", + "query-compiler", + "query-core", + "request-handlers", + "schema", + "serde", + "serde_json", + "tracing", + "tsify", + "wasm-bindgen", + "wasm-rs-dbg", +] + [[package]] name = "query-connector" version = "0.1.0" @@ -3789,10 +3824,8 @@ dependencies = [ "lru 0.7.8", "once_cell", "petgraph", - "pretty", "prisma-metrics", "psl", - "quaint", "query-builder", "query-connector", "query-structure", @@ -3922,6 +3955,7 @@ dependencies = [ "prisma-metrics", "psl", "quaint", + "query-compiler", "query-connector", "query-core", "query-engine-common", @@ -5710,7 +5744,7 @@ dependencies = [ [[package]] name = "tokio-postgres" version = "0.7.12" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#c62b9928d402685e152161907e8480603c29ef65" +source = 
"git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#0db3436fc7d5c1848df5728407f342228a91f54d" dependencies = [ "async-trait", "byteorder", diff --git a/Cargo.toml b/Cargo.toml index 3d3948494403..3dc65184f54f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,8 @@ members = [ "query-engine/request-handlers", "query-engine/schema", "query-engine/query-builders/*", + "query-compiler/query-compiler", + "query-compiler/query-compiler-wasm", "libs/*", "prisma-fmt", "prisma-schema-wasm", @@ -72,6 +74,7 @@ napi = { version = "2.16.13", default-features = false, features = [ ] } napi-derive = "2.16.12" pin-project = "1" +pretty = { version = "0.12", features = ["termcolor"] } rand = { version = "0.8" } regex = { version = "1", features = ["std"] } serde_repr = { version = "0.1.17" } @@ -79,6 +82,7 @@ serde-wasm-bindgen = { version = "0.5" } tracing = { version = "0.1" } tracing-futures = "0.2" tsify = { version = "0.4.5" } +thiserror = "1.0" # version for `wasm-bindgen@0.2.93`, see: # https://github.com/rustwasm/wasm-bindgen/pull/4072/ diff --git a/Makefile b/Makefile index 7358317bb00c..fee7c054fcb0 100644 --- a/Makefile +++ b/Makefile @@ -32,11 +32,15 @@ clean-se-wasm: @echo "Cleaning schema-engine/schema-engine-wasm/pkg" && \ cd schema-engine/schema-engine-wasm/pkg && find . ! -name '.' ! -name '..' ! -name 'README.md' -exec rm -rf {} + +clean-qc-wasm: + @echo "Cleaning query-compiler/query-compiler-wasm/pkg" && \ + cd query-compiler/query-compiler-wasm/pkg && find . ! -name '.' ! -name '..' ! 
-name 'README.md' -exec rm -rf {} + + clean-cargo: @echo "Cleaning cargo" && \ cargo clean -clean: clean-qe-wasm clean-cargo +clean: clean-qe-wasm clean-se-wasm clean-qc-wasm clean-cargo ################### # script wrappers # @@ -79,6 +83,10 @@ build-se-wasm: cd schema-engine/schema-engine-wasm && \ ./build.sh $(QE_WASM_VERSION) schema-engine/schema-engine-wasm/pkg +build-qc-wasm: + cd query-compiler/query-compiler-wasm && \ + ./build.sh $(QE_WASM_VERSION) query-compiler/query-compiler-wasm/pkg + build-schema-wasm: @printf '%s\n' "🛠️ Building the Rust crate" cargo build --profile $(PROFILE) --target=wasm32-unknown-unknown -p prisma-schema-build @@ -91,9 +99,15 @@ build-schema-wasm: # Emulate pedantic CI compilation. pedantic: - RUSTFLAGS="-D warnings" cargo fmt -- --check - RUSTFLAGS="-D warnings" cargo clippy --all-features --all-targets - RUSTFLAGS="-D warnings" cargo clippy --all-features --all-targets -p query-engine-wasm -p schema-engine-wasm -p prisma-schema-build --target wasm32-unknown-unknown + cargo fmt -- --check + cargo clippy --all-features --all-targets -- -Dwarnings + cargo clippy --all-features --all-targets \ + -p query-engine-wasm \ + -p schema-engine-wasm \ + -p query-compiler-wasm \ + -p prisma-schema-build \ + --target wasm32-unknown-unknown \ + -- -Dwarnings release: cargo build --release diff --git a/query-compiler/query-compiler-wasm/.gitignore b/query-compiler/query-compiler-wasm/.gitignore new file mode 100644 index 000000000000..a6f0e4dca125 --- /dev/null +++ b/query-compiler/query-compiler-wasm/.gitignore @@ -0,0 +1,7 @@ +/target +**/*.rs.bk +Cargo.lock +bin/ +pkg/ +wasm-pack.log +node_modules/ \ No newline at end of file diff --git a/query-compiler/query-compiler-wasm/Cargo.toml b/query-compiler/query-compiler-wasm/Cargo.toml new file mode 100644 index 000000000000..fef9446c23f5 --- /dev/null +++ b/query-compiler/query-compiler-wasm/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "query-compiler-wasm" +version = "0.1.0" +edition = 
"2021" + +[lib] +doc = false +crate-type = ["cdylib"] +name = "query_compiler_wasm" + +[dependencies] +psl.workspace = true +quaint.workspace = true +tracing.workspace = true + +js-sys.workspace = true +serde.workspace = true +serde_json.workspace = true +tsify.workspace = true +wasm-bindgen.workspace = true +wasm-rs-dbg.workspace = true +query-core = { path = "../../query-engine/core" } +query-compiler = { path = "../query-compiler", default-features = false } +schema = { path = "../../query-engine/schema" } +request-handlers = { path = "../../query-engine/request-handlers", default-features = false } + +[build-dependencies] +build-utils.path = "../../libs/build-utils" + +[features] +sqlite = ["psl/sqlite", "query-compiler/sqlite"] +postgresql = ["psl/postgresql", "query-compiler/postgresql"] +mysql = ["psl/mysql", "query-compiler/mysql"] + +[package.metadata.wasm-pack.profile.release] +wasm-opt = false # use wasm-opt explicitly in `./build.sh` + +[package.metadata.wasm-pack.profile.profiling] +wasm-opt = false # use wasm-opt explicitly in `./build.sh` diff --git a/query-compiler/query-compiler-wasm/build.rs b/query-compiler/query-compiler-wasm/build.rs new file mode 100644 index 000000000000..33aded23a4a5 --- /dev/null +++ b/query-compiler/query-compiler-wasm/build.rs @@ -0,0 +1,3 @@ +fn main() { + build_utils::store_git_commit_hash_in_env(); +} diff --git a/query-compiler/query-compiler-wasm/build.sh b/query-compiler/query-compiler-wasm/build.sh new file mode 100755 index 000000000000..2a69b3509369 --- /dev/null +++ b/query-compiler/query-compiler-wasm/build.sh @@ -0,0 +1,132 @@ +#!/usr/bin/env bash +# Call this script as `./build.sh ` +# +# Note: this script started as a copy of the `query-engine-wasm`'s `build.sh` script, but will likely diverge over time. +# For this reason, we're avoiding premature refactoring and keeping the two scripts separate. 
+ +set -euo pipefail + +CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +REPO_ROOT="$( cd "$( dirname "$CURRENT_DIR/../../../" )" >/dev/null 2>&1 && pwd )" +OUT_VERSION="${1:-"0.0.0"}" +OUT_FOLDER="${2:-"query-compiler/query-compiler-wasm/pkg"}" +OUT_TARGET="bundler" +# wasm-opt pass +WASM_OPT_ARGS=( + "-Os" # execute size-focused optimization passes (-Oz actually increases size by 1KB) + "--vacuum" # removes obviously unneeded code + "--duplicate-function-elimination" # removes duplicate functions + "--duplicate-import-elimination" # removes duplicate imports + "--remove-unused-module-elements" # removes unused module elements + "--dae-optimizing" # removes arguments to calls in an lto-like manner + "--remove-unused-names" # removes names from location that are never branched to + "--rse" # removes redundant local.sets + "--gsi" # global struct inference, to optimize constant values + "--gufa-optimizing" # optimize the entire program using type monomorphization + "--strip-dwarf" # removes DWARF debug information + "--strip-producers" # removes the "producers" section + "--strip-target-features" # removes the "target_features" section +) + +# if it's a relative path, let it be relative to the repo root +if [[ "$OUT_FOLDER" != /* ]]; then + OUT_FOLDER="$REPO_ROOT/$OUT_FOLDER" +fi +OUT_JSON="${OUT_FOLDER}/package.json" + +echo "ℹ️ target version: $OUT_VERSION" +echo "ℹ️ out folder: $OUT_FOLDER" + +if [[ -z "${WASM_BUILD_PROFILE:-}" ]]; then + if [[ -z "${BUILDKITE:-}" ]] && [[ -z "${GITHUB_ACTIONS:-}" ]]; then + WASM_BUILD_PROFILE="dev" + else + WASM_BUILD_PROFILE="release" + fi +fi + +if [ "$WASM_BUILD_PROFILE" = "dev" ]; then + WASM_TARGET_SUBDIR="debug" +else + WASM_TARGET_SUBDIR="$WASM_BUILD_PROFILE" +fi + + + +build() { + echo "ℹ️ Note that query-compiler compiled to WASM uses a different Rust toolchain" + cargo --version + + local CONNECTOR="$1" + local CARGO_TARGET_DIR + CARGO_TARGET_DIR=$(cargo metadata --format-version 1 | jq 
-r .target_directory) + echo "🔨 Building $CONNECTOR" + CARGO_PROFILE_RELEASE_OPT_LEVEL="z" cargo build \ + -p query-compiler-wasm \ + --profile "$WASM_BUILD_PROFILE" \ + --features "$CONNECTOR" \ + --target wasm32-unknown-unknown + + local IN_FILE="$CARGO_TARGET_DIR/wasm32-unknown-unknown/$WASM_TARGET_SUBDIR/query_compiler_wasm.wasm" + local OUT_FILE="$OUT_FOLDER/$CONNECTOR/query_compiler_bg.wasm" + + wasm-bindgen --target "$OUT_TARGET" --out-name query_compiler --out-dir "$OUT_FOLDER/$CONNECTOR" "$IN_FILE" + optimize "$OUT_FILE" + + if ! command -v wasm2wat &> /dev/null; then + echo "Skipping wasm2wat, as it is not installed." + else + wasm2wat "$OUT_FILE" -o "./query_compiler.$CONNECTOR.wat" + fi +} + +optimize() { + local OUT_FILE="$1" + case "$WASM_BUILD_PROFILE" in + release) + # In release mode, we want to strip the debug symbols. + wasm-opt "${WASM_OPT_ARGS[@]}" \ + "--strip-debug" \ + "$OUT_FILE" \ + -o "$OUT_FILE" + ;; + profiling) + # In profiling mode, we want to keep the debug symbols. + wasm-opt "${WASM_OPT_ARGS[@]}" \ + "--debuginfo" \ + "${OUT_FILE}" \ + -o "${OUT_FILE}" + ;; + *) + # In other modes (e.g., "dev"), skip wasm-opt. + echo "Skipping wasm-opt." 
+ ;; + esac +} + +report_size() { + local CONNECTOR + local GZ_SIZE + local FORMATTED_GZ_SIZE + + CONNECTOR="$1" + GZ_SIZE=$(gzip -c "${OUT_FOLDER}/$CONNECTOR/query_compiler_bg.wasm" | wc -c) + FORMATTED_GZ_SIZE=$(echo "$GZ_SIZE"|numfmt --format '%.3f' --to=iec-i --suffix=B) + + echo "$CONNECTOR:" + echo "ℹ️ raw: $(du -h "${OUT_FOLDER}/$CONNECTOR/query_compiler_bg.wasm")" + echo "ℹ️ zip: $GZ_SIZE bytes ($FORMATTED_GZ_SIZE)" + echo "" +} + +echo "Building query-compiler-wasm using $WASM_BUILD_PROFILE profile" + +build "postgresql" +build "sqlite" +build "mysql" + +jq '.version=$version' --arg version "$OUT_VERSION" package.json > "$OUT_JSON" + +report_size "postgresql" +report_size "sqlite" +report_size "mysql" diff --git a/query-compiler/query-compiler-wasm/package.json b/query-compiler/query-compiler-wasm/package.json new file mode 100644 index 000000000000..0a391675b303 --- /dev/null +++ b/query-compiler/query-compiler-wasm/package.json @@ -0,0 +1,5 @@ +{ + "name": "@prisma/query-compiler-wasm", + "version": "0.0.0", + "type": "module" +} diff --git a/query-compiler/query-compiler-wasm/src/compiler.rs b/query-compiler/query-compiler-wasm/src/compiler.rs new file mode 100644 index 000000000000..1095d630721c --- /dev/null +++ b/query-compiler/query-compiler-wasm/src/compiler.rs @@ -0,0 +1,94 @@ +use psl::ConnectorRegistry; +use quaint::connector::ConnectionInfo; +use query_core::protocol::EngineProtocol; +use request_handlers::RequestBody; +use serde::Deserialize; +use std::sync::Arc; +use tsify::Tsify; +use wasm_bindgen::prelude::wasm_bindgen; + +use crate::params::{AdapterFlavour, JsConnectionInfo}; + +const CONNECTOR_REGISTRY: ConnectorRegistry<'_> = &[ + #[cfg(feature = "postgresql")] + psl::builtin_connectors::POSTGRES, + #[cfg(feature = "mysql")] + psl::builtin_connectors::MYSQL, + #[cfg(feature = "sqlite")] + psl::builtin_connectors::SQLITE, +]; + +#[wasm_bindgen] +extern "C" { + /// This function registers the reason for a Wasm panic via the + /// JS 
function `globalThis.PRISMA_WASM_PANIC_REGISTRY.set_message()` + #[wasm_bindgen(js_namespace = ["global", "PRISMA_WASM_PANIC_REGISTRY"], js_name = "set_message")] + fn prisma_set_wasm_panic_message(s: &str); +} + +/// Registers a singleton panic hook that will register the reason for the Wasm panic in JS. +/// Without this, the panic message would be lost: you'd see `RuntimeError: unreachable` message in JS, +/// with no reference to the Rust function and line that panicked. +/// This function should be manually called before any other public function in this module. +/// Note: no method is safe to call after a panic has occurred. +fn register_panic_hook() { + use std::sync::Once; + static SET_HOOK: Once = Once::new(); + + SET_HOOK.call_once(|| { + std::panic::set_hook(Box::new(|info| { + let message = &info.to_string(); + prisma_set_wasm_panic_message(message); + })); + }); +} + +#[derive(Deserialize, Tsify)] +#[serde(rename_all = "camelCase")] +#[tsify(from_wasm_abi)] +pub struct QueryCompilerParams { + // TODO: support multiple datamodels + datamodel: String, + flavour: AdapterFlavour, + connection_info: JsConnectionInfo, +} + +#[wasm_bindgen] +pub struct QueryCompiler { + schema: Arc, + connection_info: ConnectionInfo, + protocol: EngineProtocol, +} + +#[wasm_bindgen] +impl QueryCompiler { + #[wasm_bindgen(constructor)] + pub fn new(params: QueryCompilerParams) -> Result { + let QueryCompilerParams { + datamodel, + flavour, + connection_info, + } = params; + + // Note: if we used `psl::validate`, we'd add ~1MB to the Wasm artifact (before gzip). 
+ let schema = Arc::new(psl::parse_without_validation(datamodel.into(), CONNECTOR_REGISTRY)); + let schema = Arc::new(schema::build(schema, true)); + + tracing::info!(git_hash = env!("GIT_HASH"), "Starting query-compiler-wasm"); + register_panic_hook(); + + Ok(Self { + schema, + connection_info: ConnectionInfo::External(connection_info.into_external_connection_info(flavour)), + protocol: EngineProtocol::Json, + }) + } + + #[wasm_bindgen] + pub fn compile(&self, request: String) -> Result { + let request = RequestBody::try_from_str(&request, self.protocol)?; + let query_doc = request.into_doc(&self.schema)?; + let plan = query_compiler::compile(&self.schema, query_doc, &self.connection_info)?; + Ok(serde_json::to_string(&plan)?) + } +} diff --git a/query-compiler/query-compiler-wasm/src/lib.rs b/query-compiler/query-compiler-wasm/src/lib.rs new file mode 100644 index 000000000000..e46abcf798b1 --- /dev/null +++ b/query-compiler/query-compiler-wasm/src/lib.rs @@ -0,0 +1,2 @@ +pub mod compiler; +pub mod params; diff --git a/query-compiler/query-compiler-wasm/src/params.rs b/query-compiler/query-compiler-wasm/src/params.rs new file mode 100644 index 000000000000..14e1c626452b --- /dev/null +++ b/query-compiler/query-compiler-wasm/src/params.rs @@ -0,0 +1,68 @@ +use quaint::prelude::{ExternalConnectionInfo, SqlFamily}; +use serde::Deserialize; + +// TODO: the code below largely duplicates driver_adapters::types, we should ideally use that +// crate instead, but it currently uses #cfg target a lot, which causes build issues when not +// explicitly building against wasm. 
+ +#[derive(Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct JsConnectionInfo { + pub schema_name: Option, + pub max_bind_values: Option, +} + +impl JsConnectionInfo { + pub fn into_external_connection_info(self, provider: AdapterFlavour) -> ExternalConnectionInfo { + let schema_name = self.get_schema_name(provider); + let sql_family = SqlFamily::from(provider); + + ExternalConnectionInfo::new( + sql_family, + schema_name.to_owned(), + self.max_bind_values.map(|v| v as usize), + ) + } + + fn get_schema_name(&self, provider: AdapterFlavour) -> &str { + match self.schema_name.as_ref() { + Some(name) => name, + None => self.default_schema_name(provider), + } + } + + fn default_schema_name(&self, provider: AdapterFlavour) -> &str { + match provider { + #[cfg(feature = "mysql")] + AdapterFlavour::Mysql => quaint::connector::DEFAULT_MYSQL_DB, + #[cfg(feature = "postgresql")] + AdapterFlavour::Postgres => quaint::connector::DEFAULT_POSTGRES_SCHEMA, + #[cfg(feature = "sqlite")] + AdapterFlavour::Sqlite => quaint::connector::DEFAULT_SQLITE_DATABASE, + } + } +} + +#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum AdapterFlavour { + #[cfg(feature = "mysql")] + Mysql, + #[cfg(feature = "postgresql")] + Postgres, + #[cfg(feature = "sqlite")] + Sqlite, +} + +impl From for SqlFamily { + fn from(f: AdapterFlavour) -> Self { + match f { + #[cfg(feature = "mysql")] + AdapterFlavour::Mysql => SqlFamily::Mysql, + #[cfg(feature = "postgresql")] + AdapterFlavour::Postgres => SqlFamily::Postgres, + #[cfg(feature = "sqlite")] + AdapterFlavour::Sqlite => SqlFamily::Sqlite, + } + } +} diff --git a/query-compiler/query-compiler/Cargo.toml b/query-compiler/query-compiler/Cargo.toml new file mode 100644 index 000000000000..213a14f302d3 --- /dev/null +++ b/query-compiler/query-compiler/Cargo.toml @@ -0,0 +1,24 @@ +[package] +edition = "2021" +name = "query-compiler" +version = "0.1.0" + +[dependencies] +query-structure 
= { path = "../../query-engine/query-structure" } +query-builder = { path = "../../query-engine/query-builders/query-builder" } +query-core = { path = "../../query-engine/core" } +# we should drop sql-query-builder and quaint once we properly abstract over Mongo +sql-query-builder = { path = "../../query-engine/query-builders/sql-query-builder" } +quaint = { path = "../../quaint" } + +thiserror.workspace = true +serde.workspace = true +itertools.workspace = true +pretty.workspace = true + +[features] +default = ["sqlite", "postgresql", "mysql", "mssql"] +sqlite = ["quaint/sqlite"] +postgresql = ["quaint/postgresql"] +mysql = ["quaint/mysql"] +mssql = ["quaint/mssql"] diff --git a/query-engine/core/src/compiler/expression.rs b/query-compiler/query-compiler/src/expression.rs similarity index 100% rename from query-engine/core/src/compiler/expression.rs rename to query-compiler/query-compiler/src/expression.rs diff --git a/query-engine/core/src/compiler/expression/format.rs b/query-compiler/query-compiler/src/expression/format.rs similarity index 100% rename from query-engine/core/src/compiler/expression/format.rs rename to query-compiler/query-compiler/src/expression/format.rs diff --git a/query-engine/core/src/compiler/mod.rs b/query-compiler/query-compiler/src/lib.rs similarity index 51% rename from query-engine/core/src/compiler/mod.rs rename to query-compiler/query-compiler/src/lib.rs index f1d0e8c75e37..3a6e8cabbd09 100644 --- a/query-engine/core/src/compiler/mod.rs +++ b/query-compiler/query-compiler/src/lib.rs @@ -8,18 +8,21 @@ use quaint::{ prelude::{ConnectionInfo, SqlFamily}, visitor, }; -use schema::QuerySchema; +use query_core::{schema::QuerySchema, QueryGraphBuilderError}; use sql_query_builder::{Context, SqlQueryBuilder}; use thiserror::Error; pub use translate::{translate, TranslateError}; -use crate::{QueryDocument, QueryGraphBuilder}; +use query_core::{QueryDocument, QueryGraphBuilder}; #[derive(Debug, Error)] pub enum CompileError { #[error("only a 
single query can be compiled at a time")] UnsupportedRequest, + #[error("failed to build query graph: {0}")] + GraphBuildError(#[from] QueryGraphBuilderError), + #[error("{0}")] TranslateError(#[from] TranslateError), } @@ -28,21 +31,23 @@ pub fn compile( query_schema: &Arc, query_doc: QueryDocument, connection_info: &ConnectionInfo, -) -> crate::Result { +) -> Result { let QueryDocument::Single(query) = query_doc else { - return Err(CompileError::UnsupportedRequest.into()); + return Err(CompileError::UnsupportedRequest); }; let ctx = Context::new(connection_info, None); let (graph, _serializer) = QueryGraphBuilder::new(query_schema).build(query)?; - let res = match connection_info.sql_family() { + let res: Result = match connection_info.sql_family() { + #[cfg(feature = "postgresql")] SqlFamily::Postgres => translate(graph, &SqlQueryBuilder::>::new(ctx)), - // feature flags are disabled for now - // SqlFamily::Mysql => translate(graph, &SqlQueryBuilder::>::new(ctx)), - // SqlFamily::Sqlite => translate(graph, &SqlQueryBuilder::>::new(ctx)), - // SqlFamily::Mssql => translate(graph, &SqlQueryBuilder::>::new(ctx)), - _ => unimplemented!(), + #[cfg(feature = "mysql")] + SqlFamily::Mysql => translate(graph, &SqlQueryBuilder::>::new(ctx)), + #[cfg(feature = "sqlite")] + SqlFamily::Sqlite => translate(graph, &SqlQueryBuilder::>::new(ctx)), + #[cfg(feature = "mssql")] + SqlFamily::Mssql => translate(graph, &SqlQueryBuilder::>::new(ctx)), }; - Ok(res.map_err(CompileError::TranslateError)?) 
+ res.map_err(CompileError::TranslateError) } diff --git a/query-engine/core/src/compiler/translate.rs b/query-compiler/query-compiler/src/translate.rs similarity index 98% rename from query-engine/core/src/compiler/translate.rs rename to query-compiler/query-compiler/src/translate.rs index a5fa6b30f0f1..5b45532ab53e 100644 --- a/query-engine/core/src/compiler/translate.rs +++ b/query-compiler/query-compiler/src/translate.rs @@ -1,8 +1,8 @@ mod query; -use crate::{EdgeRef, Node, NodeRef, Query, QueryGraph}; use query::translate_query; use query_builder::QueryBuilder; +use query_core::{EdgeRef, Node, NodeRef, Query, QueryGraph}; use thiserror::Error; use super::expression::{Binding, Expression}; diff --git a/query-engine/core/src/compiler/translate/query.rs b/query-compiler/query-compiler/src/translate/query.rs similarity index 87% rename from query-engine/core/src/compiler/translate/query.rs rename to query-compiler/query-compiler/src/translate/query.rs index 1fe1acf243ad..5e19fc58bf18 100644 --- a/query-engine/core/src/compiler/translate/query.rs +++ b/query-compiler/query-compiler/src/translate/query.rs @@ -2,10 +2,11 @@ mod read; mod write; use query_builder::QueryBuilder; +use query_core::Query; use read::translate_read_query; use write::translate_write_query; -use crate::{compiler::expression::Expression, Query}; +use crate::expression::Expression; use super::TranslateResult; diff --git a/query-engine/core/src/compiler/translate/query/read.rs b/query-compiler/query-compiler/src/translate/query/read.rs similarity index 97% rename from query-engine/core/src/compiler/translate/query/read.rs rename to query-compiler/query-compiler/src/translate/query/read.rs index f736331ec8ac..f51e2d0a9f96 100644 --- a/query-engine/core/src/compiler/translate/query/read.rs +++ b/query-compiler/query-compiler/src/translate/query/read.rs @@ -1,15 +1,13 @@ use std::collections::HashSet; use crate::{ - compiler::{ - expression::{Binding, Expression, JoinExpression}, - 
translate::TranslateResult, - TranslateError, - }, - FilteredQuery, ReadQuery, RelatedRecordsQuery, + expression::{Binding, Expression, JoinExpression}, + translate::TranslateResult, + TranslateError, }; use itertools::Itertools; use query_builder::{QueryArgumentsExt, QueryBuilder}; +use query_core::{FilteredQuery, ReadQuery, RelatedRecordsQuery}; use query_structure::{ ConditionValue, Filter, PrismaValue, QueryArguments, QueryMode, ScalarCondition, ScalarFilter, ScalarProjection, }; diff --git a/query-engine/core/src/compiler/translate/query/write.rs b/query-compiler/query-compiler/src/translate/query/write.rs similarity index 93% rename from query-engine/core/src/compiler/translate/query/write.rs rename to query-compiler/query-compiler/src/translate/query/write.rs index 286fd11e9147..50772ca8d38c 100644 --- a/query-engine/core/src/compiler/translate/query/write.rs +++ b/query-compiler/query-compiler/src/translate/query/write.rs @@ -1,9 +1,7 @@ use query_builder::QueryBuilder; +use query_core::WriteQuery; -use crate::{ - compiler::{expression::Expression, translate::TranslateResult, TranslateError}, - WriteQuery, -}; +use crate::{expression::Expression, translate::TranslateResult, TranslateError}; pub(crate) fn translate_write_query(query: WriteQuery, builder: &dyn QueryBuilder) -> TranslateResult { Ok(match query { diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index a1d976e71416..71558efb3af1 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -25,11 +25,10 @@ sql-query-builder = { path = "../query-builders/sql-query-builder" } query-structure = { path = "../query-structure", features = [ "default_generators", ] } -pretty = { version = "0.12", features = ["termcolor"] } prisma-metrics = { path = "../../libs/metrics", optional = true } serde.workspace = true serde_json.workspace = true -thiserror = "1.0" +thiserror.workspace = true tokio = { version = "1", features = ["macros", "rt", "time"] } tracing = { 
workspace = true, features = ["attributes"] } tracing-futures.workspace = true @@ -44,6 +43,3 @@ telemetry = { path = "../../libs/telemetry" } lru = "0.7.7" enumflags2.workspace = true derive_more.workspace = true - -# HACK: this should not be in core either -quaint = { workspace = true, features = ["postgresql"] } diff --git a/query-engine/core/src/error.rs b/query-engine/core/src/error.rs index e779fc311b3a..b067a325a4a5 100644 --- a/query-engine/core/src/error.rs +++ b/query-engine/core/src/error.rs @@ -1,4 +1,4 @@ -use crate::{compiler::CompileError, InterpreterError, QueryGraphBuilderError, RelationViolation, TransactionError}; +use crate::{InterpreterError, QueryGraphBuilderError, RelationViolation, TransactionError}; use connector::error::ConnectorError; use query_structure::DomainError; use thiserror::Error; @@ -67,9 +67,6 @@ pub enum CoreError { #[error("Query timed out")] QueryTimeout, - - #[error("Error compiling a query: {0}")] - CompileError(#[from] CompileError), } impl CoreError { diff --git a/query-engine/core/src/lib.rs b/query-engine/core/src/lib.rs index 3280660dd458..2ecb9b59b303 100644 --- a/query-engine/core/src/lib.rs +++ b/query-engine/core/src/lib.rs @@ -3,7 +3,6 @@ #[macro_use] extern crate tracing; -pub mod compiler; pub mod constants; pub mod executor; pub mod protocol; @@ -16,7 +15,10 @@ pub use self::{ error::{CoreError, ExtendedUserFacingError, FieldConversionError}, executor::{QueryExecutor, TransactionOptions}, interactive_transactions::{TransactionError, TxId}, + query_ast::*, query_document::*, + query_graph::*, + query_graph_builder::*, }; pub use connector::{ @@ -36,9 +38,6 @@ use self::{ executor::*, interactive_transactions::*, interpreter::{Env, ExpressionResult, Expressionista, InterpreterError, QueryInterpreter}, - query_ast::*, - query_graph::*, - query_graph_builder::*, response_ir::{IrSerializer, ResponseData}, result_ast::*, }; diff --git a/query-engine/core/src/query_ast/mod.rs b/query-engine/core/src/query_ast/mod.rs 
index 4b67b0ba4548..067cf3ae1809 100644 --- a/query-engine/core/src/query_ast/mod.rs +++ b/query-engine/core/src/query_ast/mod.rs @@ -9,7 +9,7 @@ use query_structure::{FieldSelection, Filter, Model, SelectionResult}; #[derive(Debug, Clone)] #[allow(clippy::large_enum_variant)] -pub(crate) enum Query { +pub enum Query { Read(ReadQuery), Write(WriteQuery), } diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index 09a5af04dc79..68086a06ae95 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -177,7 +177,7 @@ pub struct RecordQuery { pub filter: Option, // TODO: split into `user_selection` and `full_selection` and get rid of `selection_order` pub selected_fields: FieldSelection, - pub(crate) nested: Vec, + pub nested: Vec, pub selection_order: Vec, pub options: QueryOptions, pub relation_load_strategy: RelationLoadStrategy, @@ -191,7 +191,7 @@ pub struct ManyRecordsQuery { pub args: QueryArguments, // TODO: split into `user_selection` and `full_selection` and get rid of `selection_order` pub selected_fields: FieldSelection, - pub(crate) nested: Vec, + pub nested: Vec, pub selection_order: Vec, pub options: QueryOptions, pub relation_load_strategy: RelationLoadStrategy, diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index b538e2675e32..ead82f51cd0e 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -6,7 +6,7 @@ use query_structure::{prelude::*, DatasourceFieldName, Filter, RecordFilter, Wri use std::collections::HashMap; #[derive(Debug, Clone)] -pub(crate) enum WriteQuery { +pub enum WriteQuery { CreateRecord(CreateRecord), CreateManyRecords(CreateManyRecords), UpdateRecord(UpdateRecord), diff --git a/query-engine/core/src/query_graph/mod.rs b/query-engine/core/src/query_graph/mod.rs index 8459584a0c42..31c6ead0d6f6 100644 --- a/query-engine/core/src/query_graph/mod.rs +++ 
b/query-engine/core/src/query_graph/mod.rs @@ -23,7 +23,7 @@ use std::{collections::HashSet, fmt}; pub type QueryGraphResult = std::result::Result; #[allow(clippy::large_enum_variant)] -pub(crate) enum Node { +pub enum Node { /// Nodes representing actual queries to the underlying connector. Query(Query), @@ -85,7 +85,7 @@ impl Flow { } // Current limitation: We need to narrow it down to ID diffs for Hash and EQ. -pub(crate) enum Computation { +pub enum Computation { Diff(DiffNode), } @@ -339,7 +339,7 @@ impl QueryGraph { } /// Returns a reference to the content of `node`, if the content is still present. - pub(crate) fn node_content(&self, node: &NodeRef) -> Option<&Node> { + pub fn node_content(&self, node: &NodeRef) -> Option<&Node> { self.graph.node_weight(node.node_ix).unwrap().borrow() } @@ -383,7 +383,7 @@ impl QueryGraph { /// Removes the node from the graph but leaves the graph intact by keeping the empty /// node in the graph by plucking the content of the node, but not the node itself. 
- pub(crate) fn pluck_node(&mut self, node: &NodeRef) -> Node { + pub fn pluck_node(&mut self, node: &NodeRef) -> Node { self.graph.node_weight_mut(node.node_ix).unwrap().unset() } diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index 185797bfea2b..4afc6fff04bf 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -11,6 +11,7 @@ "build": "tsup ./src/testd-qe.ts ./src/bench.ts --format esm --dts", "test:qe": "node --import tsx ./src/testd-qe.ts", "demo:se": "node --import tsx ./src/demo-se.ts", + "demo:qc": "node --import tsx ./src/demo-qc.ts", "clean:d1": "rm -rf ../../connector-test-kit-rs/query-engine-tests/.wrangler" }, "tsup": { @@ -20,7 +21,10 @@ "../../../query-engine-wasm/pkg/sqlite/query_engine_bg.js", "../../../schema-engine-wasm/pkg/postgresql/schema_engine_bg.js", "../../../schema-engine-wasm/pkg/mysql/schema_engine_bg.js", - "../../../schema-engine-wasm/pkg/sqlite/schema_engine_bg.js" + "../../../schema-engine-wasm/pkg/sqlite/schema_engine_bg.js", + "../../../query-compiler-wasm/pkg/postgresql/query_compiler_bg.js", + "../../../query-compiler-wasm/pkg/mysql/query_compiler_bg.js", + "../../../query-compiler-wasm/pkg/sqlite/query_compiler_bg.js" ] }, "keywords": [], diff --git a/query-engine/driver-adapters/executor/src/demo-qc.ts b/query-engine/driver-adapters/executor/src/demo-qc.ts new file mode 100644 index 000000000000..eeffe35a4c25 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/demo-qc.ts @@ -0,0 +1,114 @@ +import * as S from "@effect/schema/Schema"; +import { bindAdapter, ConnectionInfo } from "@prisma/driver-adapter-utils"; + +import type { DriverAdaptersManager } from "./driver-adapters-manager"; +import { Env } from "./types"; +import * as qc from "./query-compiler"; +import { err } from "./utils"; +import { setupDriverAdaptersManager } from "./setup"; + +/** + * Example run: 
`DRIVER_ADAPTER="libsql" pnpm demo:qc` + */ +async function main(): Promise { + const env = S.decodeUnknownSync(Env)(process.env); + console.log("[env]", env); + + /** + * Static input for demo purposes. + */ + + const url = "file:./db.sqlite"; + + const schema = /* prisma */ ` + generator client { + provider = "prisma-client-js" + } + + datasource db { + provider = "sqlite" + url = "file:./db.sqlite" + } + + model User { + id Int @id @default(autoincrement()) + email String @unique + name String? + posts Post[] + } + + model Post { + id Int @id @default(autoincrement()) + title String + content String + author User @relation(fields: [authorId], references: [id]) + authorId Int + } + `; + + const driverAdapterManager = await setupDriverAdaptersManager(env); + + const { compiler: compiler, adapter } = await initQC({ + env, + driverAdapterManager, + url, + schema, + }); + + const query = compiler.compile( + JSON.stringify({ + modelName: "User", + action: "createOne", + query: { + arguments: { + data: { + email: "whatever@gmail.com", + }, + }, + selection: { + id: true, + }, + }, + }), + ); + console.log("[query]", query); +} + +type InitQueryCompilerParams = { + env: Env; + driverAdapterManager: DriverAdaptersManager; + url: string; + schema: string; +}; + +async function initQC({ + env, + driverAdapterManager, + url, + schema, +}: InitQueryCompilerParams) { + const adapter = await driverAdapterManager.connect({ url }); + const errorCapturingAdapter = bindAdapter(adapter); + + let connectionInfo: ConnectionInfo = {}; + if (errorCapturingAdapter.getConnectionInfo) { + const result = errorCapturingAdapter.getConnectionInfo(); + if (!result.ok) { + throw result.error; + } + connectionInfo = result.value; + } + + const compiler = await qc.initQueryCompiler({ + datamodel: schema, + flavour: adapter.provider, + connectionInfo, + }); + + return { + compiler: compiler, + adapter: errorCapturingAdapter, + }; +} + +main().catch(err); diff --git 
a/query-engine/driver-adapters/executor/src/query-compiler-wasm.ts b/query-engine/driver-adapters/executor/src/query-compiler-wasm.ts new file mode 100644 index 000000000000..2eb500b18ca9 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/query-compiler-wasm.ts @@ -0,0 +1,45 @@ +import * as wasmPostgres from "../../../../query-compiler/query-compiler-wasm/pkg/postgresql/query_compiler_bg.js"; +import * as wasmMysql from "../../../../query-compiler/query-compiler-wasm/pkg/mysql/query_compiler_bg.js"; +import * as wasmSqlite from "../../../../query-compiler/query-compiler-wasm/pkg/sqlite/query_compiler_bg.js"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { __dirname } from "./utils.js"; + +const wasm = { + postgres: wasmPostgres, + mysql: wasmMysql, + sqlite: wasmSqlite, +}; + +type EngineName = keyof typeof wasm; + +const initializedModules = new Set(); + +export async function getQueryCompilerForProvider(provider: EngineName) { + const engine = wasm[provider]; + if (!initializedModules.has(provider)) { + const subDir = provider === "postgres" ? 
"postgresql" : provider; + const bytes = await fs.readFile( + path.resolve( + __dirname, + "..", + "..", + "..", + "..", + "query-compiler", + "query-compiler-wasm", + "pkg", + subDir, + "query_compiler_bg.wasm", + ), + ); + const module = new WebAssembly.Module(bytes); + const instance = new WebAssembly.Instance(module, { + "./query_compiler_bg.js": engine, + }); + engine.__wbg_set_wasm(instance.exports); + initializedModules.add(provider); + } + + return engine.QueryCompiler; +} diff --git a/query-engine/driver-adapters/executor/src/query-compiler.ts b/query-engine/driver-adapters/executor/src/query-compiler.ts new file mode 100644 index 000000000000..37e3794541cc --- /dev/null +++ b/query-engine/driver-adapters/executor/src/query-compiler.ts @@ -0,0 +1,25 @@ +import { ConnectionInfo } from "@prisma/driver-adapter-utils"; +import { __dirname } from "./utils"; +import { AdapterFlavour } from "query-engine-wasm-baseline"; + +export type QueryCompilerParams = { + // TODO: support multiple datamodels + datamodel: string; + flavour: AdapterFlavour; + connectionInfo: ConnectionInfo; +}; + +export interface QueryCompiler { + new (params: QueryCompilerParams): QueryCompiler; + compile(query: string): Promise; +} + +export async function initQueryCompiler( + params: QueryCompilerParams, +): Promise { + const { getQueryCompilerForProvider } = await import("./query-compiler-wasm"); + const WasmQueryCompiler = (await getQueryCompilerForProvider( + params.flavour, + )) as QueryCompiler; + return new WasmQueryCompiler(params); +} diff --git a/query-engine/driver-adapters/src/lib.rs b/query-engine/driver-adapters/src/lib.rs index 137df06d7315..69193b46710b 100644 --- a/query-engine/driver-adapters/src/lib.rs +++ b/query-engine/driver-adapters/src/lib.rs @@ -47,6 +47,7 @@ impl From for QuaintError { pub use queryable::from_js; pub(crate) use transaction::JsTransaction; +pub use types::{AdapterFlavour, JsConnectionInfo}; #[cfg(target_arch = "wasm32")] pub use wasm::JsObjectExtern 
as JsObject; diff --git a/query-engine/driver-adapters/src/types.rs b/query-engine/driver-adapters/src/types.rs index 03f9c5d6325a..01053ee7875e 100644 --- a/query-engine/driver-adapters/src/types.rs +++ b/query-engine/driver-adapters/src/types.rs @@ -18,10 +18,13 @@ use serde_repr::{Deserialize_repr, Serialize_repr}; #[derive(Debug, Eq, PartialEq, Clone)] pub enum AdapterFlavour { #[cfg(feature = "mysql")] + #[cfg_attr(target_arch = "wasm32", serde(rename = "mysql"))] Mysql, #[cfg(feature = "postgresql")] + #[cfg_attr(target_arch = "wasm32", serde(rename = "postgres"))] Postgres, #[cfg(feature = "sqlite")] + #[cfg_attr(target_arch = "wasm32", serde(rename = "sqlite"))] Sqlite, } @@ -71,7 +74,7 @@ impl From<&AdapterFlavour> for SqlFamily { #[cfg_attr(target_arch = "wasm32", derive(Deserialize))] #[cfg_attr(target_arch = "wasm32", serde(rename_all = "camelCase"))] #[derive(Default)] -pub(crate) struct JsConnectionInfo { +pub struct JsConnectionInfo { pub schema_name: Option, pub max_bind_values: Option, } diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml index a12e7a28675e..c042e1f30429 100644 --- a/query-engine/query-engine-node-api/Cargo.toml +++ b/query-engine/query-engine-node-api/Cargo.toml @@ -23,6 +23,7 @@ query-core = { path = "../core", features = ["metrics"] } request-handlers = { path = "../request-handlers", features = ["all"] } query-connector = { path = "../connectors/query-connector" } query-engine-common = { path = "../../libs/query-engine-common" } +query-compiler = { path = "../../query-compiler/query-compiler" } user-facing-errors = { path = "../../libs/user-facing-errors" } telemetry = { path = "../../libs/telemetry" } psl = { workspace = true, features = ["all"] } diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index d646cafff403..0e8169253e50 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ 
b/query-engine/query-engine-node-api/src/engine.rs @@ -4,7 +4,7 @@ use napi::{threadsafe_function::ThreadSafeCallContext, Env, JsFunction, JsObject use napi_derive::napi; use prisma_metrics::{MetricFormat, WithMetricsInstrumentation}; use psl::PreviewFeature; -use quaint::connector::{ConnectionInfo, ExternalConnector}; +use quaint::{connector::ExternalConnector, prelude::ConnectionInfo}; use query_core::{protocol::EngineProtocol, relation_load_strategy, schema, TransactionOptions, TxId}; use query_engine_common::{ engine::{ @@ -382,8 +382,8 @@ impl QueryEngine { } }; - let plan = query_core::compiler::compile(engine.query_schema(), query_doc, &connection_info) - .map_err(ApiError::from)?; + let plan = query_compiler::compile(engine.query_schema(), query_doc, &connection_info) + .map_err(|err| napi::Error::from_reason(err.to_string()))?; let response = if human_readable { plan.to_string() diff --git a/query-engine/query-engine-wasm/src/wasm/engine.rs b/query-engine/query-engine-wasm/src/wasm/engine.rs index 48b3c100e361..5adc474e5cfb 100644 --- a/query-engine/query-engine-wasm/src/wasm/engine.rs +++ b/query-engine/query-engine-wasm/src/wasm/engine.rs @@ -8,7 +8,7 @@ use crate::{ use driver_adapters::JsObject; use js_sys::Function as JsFunction; use psl::ConnectorRegistry; -use quaint::connector::{ConnectionInfo, ExternalConnector}; +use quaint::connector::ExternalConnector; use query_core::{ protocol::EngineProtocol, relation_load_strategy, @@ -359,29 +359,4 @@ impl QueryEngine { .with_subscriber(dispatcher) .await } - - #[wasm_bindgen] - pub async fn compile( - &self, - request: String, - _human_readable: bool, // ignored on wasm to not compile it in - ) -> Result { - let dispatcher = self.logger.dispatcher(); - - async { - let inner = self.inner.read().await; - let engine = inner.as_engine()?; - - let request = RequestBody::try_from_str(&request, engine.engine_protocol())?; - let query_doc = request.into_doc(engine.query_schema())?; - - let connection_info = 
ConnectionInfo::External(self.adapter.get_connection_info().await?); - - let plan = query_core::compiler::compile(engine.query_schema(), query_doc, &connection_info) - .map_err(ApiError::from)?; - Ok(serde_json::to_string(&plan)?) - } - .with_subscriber(dispatcher) - .await - } } diff --git a/query-engine/query-engine/examples/compiler.rs b/query-engine/query-engine/examples/compiler.rs deleted file mode 100644 index e72a1acdf987..000000000000 --- a/query-engine/query-engine/examples/compiler.rs +++ /dev/null @@ -1,88 +0,0 @@ -use std::sync::Arc; - -use quaint::{ - prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, - visitor::Postgres, -}; -use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; -use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; -use serde_json::json; -use sql_query_builder::{Context, SqlQueryBuilder}; - -pub fn main() -> anyhow::Result<()> { - let schema_string = include_str!("./schema.prisma"); - let schema = psl::validate(schema_string.into()); - - if schema.diagnostics.has_errors() { - anyhow::bail!("invalid schema"); - } - - let schema = Arc::new(schema); - let query_schema = Arc::new(query_core::schema::build(schema, true)); - - let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( - SqlFamily::Postgres, - "public".to_owned(), - None, - )); - - // prisma.user.findUnique({ - // where: { - // email: Prisma.Param("userEmail") - // }, - // select: { - // val: true, - // posts: true, - // profile: true, - // } - // }) - let query: JsonSingleQuery = serde_json::from_value(json!({ - "modelName": "User", - "action": "findMany", - "query": { - "arguments": { - "where": { - "email": { - "$type": "Param", - "value": "userEmail" - } - } - }, - "selection": { - "val": true, - "posts": { - "arguments": {}, - "selection": { - "$scalars": true - } - }, - "profile": { - "arguments": {}, - "selection": { - "$scalars": true - } - } - } - } - }))?; - - let request = 
RequestBody::Json(JsonBody::Single(query)); - let doc = request.into_doc(&query_schema)?; - - let QueryDocument::Single(query) = doc else { - anyhow::bail!("expected single query"); - }; - - let (graph, _serializer) = QueryGraphBuilder::new(&query_schema).build(query)?; - - println!("{graph}"); - - let ctx = Context::new(&connection_info, None); - let builder = SqlQueryBuilder::>::new(ctx); - - let expr = query_core::compiler::translate(graph, &builder)?; - - println!("{}", expr.pretty_print(true, 80)?); - - Ok(()) -} diff --git a/query-engine/query-engine/examples/schema.prisma b/query-engine/query-engine/examples/schema.prisma deleted file mode 100644 index ff51ae3ddfe0..000000000000 --- a/query-engine/query-engine/examples/schema.prisma +++ /dev/null @@ -1,33 +0,0 @@ -generator client { - provider = "prisma-client-js" -} - -datasource db { - provider = "postgresql" - url = "postgresql://postgres:prisma@localhost:5438" -} - -model User { - id String @id @default(cuid()) - email String @unique - name String? - posts Post[] - val Int? - profile Profile? -} - -model Profile { - userId String @id - user User @relation(fields: [userId], references: [id]) -} - -model Post { - id String @id @default(cuid()) - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt - published Boolean - title String - content String? - authorId String? - author User? 
@relation(fields: [authorId], references: [id]) -} diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index 8d7e8b4e2222..228343a31715 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -61,7 +61,7 @@ mssql-native = [ "sql-query-connector/mssql-native", "user-facing-errors/mssql-native", ] -driver-adapters = ["sql-query-connector/driver-adapters"] +driver-adapters = ["sql", "sql-query-connector/driver-adapters"] all = [ "mongodb", "mysql-native", diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs index 28c82e58ab82..c874da015e20 100644 --- a/query-engine/request-handlers/src/load_executor.rs +++ b/query-engine/request-handlers/src/load_executor.rs @@ -3,6 +3,7 @@ use psl::{builtin_connectors::*, Datasource, PreviewFeatures}; use quaint::connector::ExternalConnector; use query_core::{executor::InterpretingExecutor, Connector, QueryExecutor}; +#[cfg(feature = "sql")] use sql_query_connector::*; use std::collections::HashMap; use std::env; @@ -22,7 +23,7 @@ pub enum ConnectorKind<'a> { /// Loads a query executor based on the parsed Prisma schema (datasource). 
pub async fn load( connector_kind: ConnectorKind<'_>, - features: PreviewFeatures, + #[allow(unused_variables)] features: PreviewFeatures, #[allow(unused_variables)] tracing_enabled: bool, ) -> query_core::Result> { match connector_kind { @@ -159,6 +160,7 @@ mod native { } } +#[cfg(any(feature = "sql", feature = "mongodb"))] fn executor_for(connector: T, force_transactions: bool) -> Box where T: Connector + Send + Sync + 'static, From db0460451f6ddb16c9b9411119289c7804d2d7ca Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 10:28:26 +0100 Subject: [PATCH 55/65] Fix building WASM under Nix --- shell.nix | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/shell.nix b/shell.nix index bf6c03cc3716..ac1931e79a3b 100644 --- a/shell.nix +++ b/shell.nix @@ -2,6 +2,14 @@ pkgs ? import { }, }: +let + wasm-bindgen-cli = pkgs.wasm-bindgen-cli.override { + version = "0.2.93"; + hash = "sha256-DDdu5mM3gneraM85pAepBXWn3TMofarVR4NbjMdz3r0="; + cargoHash = "sha256-birrg+XABBHHKJxfTKAMSlmTVYLmnmqMDfRnmG6g/YQ="; + }; + +in pkgs.mkShell { packages = with pkgs; [ binaryen From 5fc1879774d5d62f23296294093b9474d1eb8876 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 17 Jan 2025 19:48:21 +0100 Subject: [PATCH 56/65] Fix type error --- libs/driver-adapters/executor/src/query-compiler.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/libs/driver-adapters/executor/src/query-compiler.ts b/libs/driver-adapters/executor/src/query-compiler.ts index 37e3794541cc..ee752b9cbcfa 100644 --- a/libs/driver-adapters/executor/src/query-compiler.ts +++ b/libs/driver-adapters/executor/src/query-compiler.ts @@ -1,11 +1,10 @@ import { ConnectionInfo } from "@prisma/driver-adapter-utils"; import { __dirname } from "./utils"; -import { AdapterFlavour } from "query-engine-wasm-baseline"; export type QueryCompilerParams = { // TODO: support multiple datamodels datamodel: string; - flavour: AdapterFlavour; + flavour: 'postgres' | 'mysql' | 'sqlite'; 
connectionInfo: ConnectionInfo; }; From c71ba311710310b78eb247877988aa798f90c1f4 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 10:40:55 +0100 Subject: [PATCH 57/65] Uncomment react-native --- .github/workflows/build-engines.yml | 30 +++++++++++++------------- .github/workflows/test-compilation.yml | 12 +++++------ 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/.github/workflows/build-engines.yml b/.github/workflows/build-engines.yml index ac7a72024b52..ecf1bfcf7600 100644 --- a/.github/workflows/build-engines.yml +++ b/.github/workflows/build-engines.yml @@ -165,14 +165,14 @@ jobs: with: commit: ${{ github.sha }} - # build-react-native: - # name: Build Engines for React native - # needs: - # - is-release-necessary - # if: ${{ needs.is-release-necessary.outputs.release == 'true' }} - # uses: ./.github/workflows/build-engines-react-native-template.yml - # with: - # commit: ${{ github.sha }} + build-react-native: + name: Build Engines for React native + needs: + - is-release-necessary + if: ${{ needs.is-release-necessary.outputs.release == 'true' }} + uses: ./.github/workflows/build-engines-react-native-template.yml + with: + commit: ${{ github.sha }} build-windows: name: Build Engines for Windows @@ -192,7 +192,7 @@ jobs: - build-linux - build-macos-intel - build-macos-silicon - # - build-react-native + - build-react-native - build-windows env: BUCKET_NAME: "prisma-builds" @@ -247,12 +247,12 @@ jobs: cp -r rhel-openssl-1.1.x debian-openssl-1.1.x cp -r rhel-openssl-3.0.x debian-openssl-3.0.x - # - name: Create .zip for react-native - # working-directory: engines-artifacts - # run: | - # mkdir react-native - # zip -r react-native/binaries.zip ios android - # rm -rf ios android + - name: Create .zip for react-native + working-directory: engines-artifacts + run: | + mkdir react-native + zip -r react-native/binaries.zip ios android + rm -rf ios android - name: "Create compressed engine files (.gz)" working-directory: 
engines-artifacts diff --git a/.github/workflows/test-compilation.yml b/.github/workflows/test-compilation.yml index 193fdc26da96..3db71c67b5e7 100644 --- a/.github/workflows/test-compilation.yml +++ b/.github/workflows/test-compilation.yml @@ -50,9 +50,9 @@ jobs: - name: "Check that Cargo.lock did not change" run: "git diff --exit-code" - # test-react-native-compilation: - # name: React Native - # uses: ./.github/workflows/build-engines-react-native-template.yml - # with: - # commit: ${{ github.sha }} - # uploadArtifacts: false + test-react-native-compilation: + name: React Native + uses: ./.github/workflows/build-engines-react-native-template.yml + with: + commit: ${{ github.sha }} + uploadArtifacts: false From 4bee1bd05400167bba6b65902d0ebf29e98ee07b Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 10:44:54 +0100 Subject: [PATCH 58/65] Add workspace lints to query-engine-wasm --- query-compiler/query-compiler-wasm/Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/query-compiler/query-compiler-wasm/Cargo.toml b/query-compiler/query-compiler-wasm/Cargo.toml index fef9446c23f5..4cc9c49de181 100644 --- a/query-compiler/query-compiler-wasm/Cargo.toml +++ b/query-compiler/query-compiler-wasm/Cargo.toml @@ -8,6 +8,9 @@ doc = false crate-type = ["cdylib"] name = "query_compiler_wasm" +[lints] +workspace = true + [dependencies] psl.workspace = true quaint.workspace = true From 384c3d1d6c65e25d0e57780ec0b3245a5f1a9114 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 10:52:20 +0100 Subject: [PATCH 59/65] Add workspace lints to schema-engine-wasm --- schema-engine/schema-engine-wasm/Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schema-engine/schema-engine-wasm/Cargo.toml b/schema-engine/schema-engine-wasm/Cargo.toml index 759a46b46172..315e50be2978 100644 --- a/schema-engine/schema-engine-wasm/Cargo.toml +++ b/schema-engine/schema-engine-wasm/Cargo.toml @@ -8,6 +8,9 @@ doc = false crate-type = 
["cdylib"] name = "schema_engine_wasm" +[lints] +workspace = true + [features] sqlite = ["driver-adapters/sqlite", "psl/sqlite"] postgresql = ["driver-adapters/postgresql", "psl/postgresql"] From 8035f1bc92e6bae9445c7465ea33dd0498fc790b Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 10:58:29 +0100 Subject: [PATCH 60/65] Remove obsolete query schema params handling --- query-engine/core/src/query_document/parser.rs | 3 ++- query-engine/schema/src/input_types.rs | 11 +---------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/query-engine/core/src/query_document/parser.rs b/query-engine/core/src/query_document/parser.rs index 85ee16e27df7..e7b7c560b42e 100644 --- a/query-engine/core/src/query_document/parser.rs +++ b/query-engine/core/src/query_document/parser.rs @@ -232,7 +232,8 @@ impl QueryDocumentParser { possible_input_types: &[InputType<'a>], query_schema: &'a QuerySchema, ) -> QueryParserResult> { - // TODO: we disabled generating Param explicitly in the query schema for now + // TODO: make query parsing aware of whether we are using the query compiler, + // and disallow placeholders in the query document if we are not. if let ArgumentValue::Scalar(pv @ PrismaValue::Placeholder { .. 
}) = &value { return Ok(ParsedInputValue::Single(pv.clone())); } diff --git a/query-engine/schema/src/input_types.rs b/query-engine/schema/src/input_types.rs index 3b47b2a37789..4ce09bd97bd2 100644 --- a/query-engine/schema/src/input_types.rs +++ b/query-engine/schema/src/input_types.rs @@ -122,15 +122,10 @@ pub struct InputField<'a> { impl<'a> InputField<'a> { pub(crate) fn new( name: Cow<'a, str>, - mut field_types: Vec>, + field_types: Vec>, default_value: Option, is_required: bool, ) -> InputField<'a> { - // todo - #[allow(clippy::overly_complex_bool_expr)] - if false && field_types.iter().any(|t| t.is_scalar()) { - field_types.push(InputType::Scalar(ScalarType::Param)); - } InputField { name, default_value, @@ -284,10 +279,6 @@ impl<'a> InputType<'a> { InputType::Enum(containing) } - pub fn is_scalar(&self) -> bool { - matches!(self, Self::Scalar(_)) - } - pub fn is_json(&self) -> bool { matches!( self, From 851c3a07f8fae0c8c8baff80f76f168b93f591ae Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 11:03:48 +0100 Subject: [PATCH 61/65] Don't build QC executor for now --- libs/driver-adapters/executor/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/driver-adapters/executor/package.json b/libs/driver-adapters/executor/package.json index 390beb027807..334f5dbc1c88 100644 --- a/libs/driver-adapters/executor/package.json +++ b/libs/driver-adapters/executor/package.json @@ -9,7 +9,7 @@ "description": "", "private": true, "scripts": { - "build": "tsup ./src/testd-qe.ts ./src/demo-se.ts ./src/demo-qc.ts ./src/bench.ts --format esm --dts", + "build": "tsup ./src/testd-qe.ts ./src/demo-se.ts ./src/bench.ts --format esm --dts", "test:qe": "node --import tsx ./src/testd-qe.ts", "demo:se": "node --import tsx ./src/demo-se.ts", "demo:qc": "node --import tsx ./src/demo-qc.ts", From 405c44acd9f2976fdeba9d05ce83c3268f4886fe Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 20 Jan 2025 11:27:21 +0100 Subject: [PATCH 
62/65] Add playground back --- Cargo.lock | 14 +++ Cargo.toml | 1 + .../query-compiler-playground/Cargo.toml | 15 ++++ .../examples/compiler.rs | 88 +++++++++++++++++++ .../examples/schema.prisma | 33 +++++++ .../query-compiler-playground/src/lib.rs | 1 + 6 files changed, 152 insertions(+) create mode 100644 query-compiler/query-compiler-playground/Cargo.toml create mode 100644 query-compiler/query-compiler-playground/examples/compiler.rs create mode 100644 query-compiler/query-compiler-playground/examples/schema.prisma create mode 100644 query-compiler/query-compiler-playground/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 25d219243a7a..6566d4a0c56e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3727,6 +3727,20 @@ dependencies = [ "thiserror 1.0.44", ] +[[package]] +name = "query-compiler-playground" +version = "0.1.0" +dependencies = [ + "anyhow", + "psl", + "quaint", + "query-compiler", + "query-core", + "request-handlers", + "serde_json", + "sql-query-builder", +] + [[package]] name = "query-compiler-wasm" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index a60238045ec9..1a1942571277 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,7 @@ members = [ "query-engine/schema", "query-engine/query-builders/*", "query-compiler/query-compiler", + "query-compiler/query-compiler-playground", "query-compiler/query-compiler-wasm", "libs/*", "prisma-fmt", diff --git a/query-compiler/query-compiler-playground/Cargo.toml b/query-compiler/query-compiler-playground/Cargo.toml new file mode 100644 index 000000000000..76cd7938869f --- /dev/null +++ b/query-compiler/query-compiler-playground/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "query-compiler-playground" +version = "0.1.0" +edition = "2021" + +[dependencies] +anyhow = "1" +serde_json.workspace = true + +psl.workspace = true +query-compiler.path = "../query-compiler" +request-handlers.path = "../../query-engine/request-handlers" +query-core.path = "../../query-engine/core" +quaint.workspace = true 
+sql-query-builder.path = "../../query-engine/query-builders/sql-query-builder" diff --git a/query-compiler/query-compiler-playground/examples/compiler.rs b/query-compiler/query-compiler-playground/examples/compiler.rs new file mode 100644 index 000000000000..b346016c0111 --- /dev/null +++ b/query-compiler/query-compiler-playground/examples/compiler.rs @@ -0,0 +1,88 @@ +use std::sync::Arc; + +use quaint::{ + prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, + visitor::Postgres, +}; +use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; +use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; +use serde_json::json; +use sql_query_builder::{Context, SqlQueryBuilder}; + +pub fn main() -> anyhow::Result<()> { + let schema_string = include_str!("./schema.prisma"); + let schema = psl::validate(schema_string.into()); + + if schema.diagnostics.has_errors() { + anyhow::bail!("invalid schema"); + } + + let schema = Arc::new(schema); + let query_schema = Arc::new(query_core::schema::build(schema, true)); + + let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( + SqlFamily::Postgres, + "public".to_owned(), + None, + )); + + // prisma.user.findUnique({ + // where: { + // email: Prisma.Param("userEmail") + // }, + // select: { + // val: true, + // posts: true, + // profile: true, + // } + // }) + let query: JsonSingleQuery = serde_json::from_value(json!({ + "modelName": "User", + "action": "findMany", + "query": { + "arguments": { + "where": { + "email": { + "$type": "Param", + "value": "userEmail" + } + } + }, + "selection": { + "val": true, + "posts": { + "arguments": {}, + "selection": { + "$scalars": true + } + }, + "profile": { + "arguments": {}, + "selection": { + "$scalars": true + } + } + } + } + }))?; + + let request = RequestBody::Json(JsonBody::Single(query)); + let doc = request.into_doc(&query_schema)?; + + let QueryDocument::Single(query) = doc else { + anyhow::bail!("expected single query"); + }; + + 
let (graph, _serializer) = QueryGraphBuilder::new(&query_schema).build(query)?; + + println!("{graph}"); + + let ctx = Context::new(&connection_info, None); + let builder = SqlQueryBuilder::>::new(ctx); + + let expr = query_compiler::translate(graph, &builder)?; + + println!("{}", expr.pretty_print(true, 80)?); + + Ok(()) +} diff --git a/query-compiler/query-compiler-playground/examples/schema.prisma b/query-compiler/query-compiler-playground/examples/schema.prisma new file mode 100644 index 000000000000..ff51ae3ddfe0 --- /dev/null +++ b/query-compiler/query-compiler-playground/examples/schema.prisma @@ -0,0 +1,33 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "postgresql" + url = "postgresql://postgres:prisma@localhost:5438" +} + +model User { + id String @id @default(cuid()) + email String @unique + name String? + posts Post[] + val Int? + profile Profile? +} + +model Profile { + userId String @id + user User @relation(fields: [userId], references: [id]) +} + +model Post { + id String @id @default(cuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + published Boolean + title String + content String? + authorId String? + author User? 
@relation(fields: [authorId], references: [id]) +} diff --git a/query-compiler/query-compiler-playground/src/lib.rs b/query-compiler/query-compiler-playground/src/lib.rs new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/query-compiler/query-compiler-playground/src/lib.rs @@ -0,0 +1 @@ + From e5f4ae78ac5373d4cc62662dfad40df730bb67a5 Mon Sep 17 00:00:00 2001 From: Florian Goessler Date: Mon, 20 Jan 2025 16:36:01 +0100 Subject: [PATCH 63/65] cleanup(compiler): revert unneeded changes --- libs/driver-adapters/src/lib.rs | 2 +- libs/driver-adapters/src/types.rs | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/libs/driver-adapters/src/lib.rs b/libs/driver-adapters/src/lib.rs index 69193b46710b..18e9ac96ef0f 100644 --- a/libs/driver-adapters/src/lib.rs +++ b/libs/driver-adapters/src/lib.rs @@ -47,7 +47,7 @@ impl From for QuaintError { pub use queryable::from_js; pub(crate) use transaction::JsTransaction; -pub use types::{AdapterFlavour, JsConnectionInfo}; +pub use types::AdapterFlavour; #[cfg(target_arch = "wasm32")] pub use wasm::JsObjectExtern as JsObject; diff --git a/libs/driver-adapters/src/types.rs b/libs/driver-adapters/src/types.rs index 01053ee7875e..03f9c5d6325a 100644 --- a/libs/driver-adapters/src/types.rs +++ b/libs/driver-adapters/src/types.rs @@ -18,13 +18,10 @@ use serde_repr::{Deserialize_repr, Serialize_repr}; #[derive(Debug, Eq, PartialEq, Clone)] pub enum AdapterFlavour { #[cfg(feature = "mysql")] - #[cfg_attr(target_arch = "wasm32", serde(rename = "mysql"))] Mysql, #[cfg(feature = "postgresql")] - #[cfg_attr(target_arch = "wasm32", serde(rename = "postgres"))] Postgres, #[cfg(feature = "sqlite")] - #[cfg_attr(target_arch = "wasm32", serde(rename = "sqlite"))] Sqlite, } @@ -74,7 +71,7 @@ impl From<&AdapterFlavour> for SqlFamily { #[cfg_attr(target_arch = "wasm32", derive(Deserialize))] #[cfg_attr(target_arch = "wasm32", serde(rename_all = "camelCase"))] #[derive(Default)] -pub struct JsConnectionInfo { 
+pub(crate) struct JsConnectionInfo { pub schema_name: Option, pub max_bind_values: Option, } From d6f8a092c5908261ad09fb500f108249b7baeeb8 Mon Sep 17 00:00:00 2001 From: Florian Goessler Date: Mon, 20 Jan 2025 16:47:49 +0100 Subject: [PATCH 64/65] cleanup(compiler): make query compiler playground a binary crate --- .../query-compiler-playground/src/lib.rs | 1 - .../query-compiler-playground/src/main.rs | 88 +++++++++++++++++++ .../src/schema.prisma | 33 +++++++ 3 files changed, 121 insertions(+), 1 deletion(-) delete mode 100644 query-compiler/query-compiler-playground/src/lib.rs create mode 100644 query-compiler/query-compiler-playground/src/main.rs create mode 100644 query-compiler/query-compiler-playground/src/schema.prisma diff --git a/query-compiler/query-compiler-playground/src/lib.rs b/query-compiler/query-compiler-playground/src/lib.rs deleted file mode 100644 index 8b137891791f..000000000000 --- a/query-compiler/query-compiler-playground/src/lib.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/query-compiler/query-compiler-playground/src/main.rs b/query-compiler/query-compiler-playground/src/main.rs new file mode 100644 index 000000000000..b346016c0111 --- /dev/null +++ b/query-compiler/query-compiler-playground/src/main.rs @@ -0,0 +1,88 @@ +use std::sync::Arc; + +use quaint::{ + prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, + visitor::Postgres, +}; +use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; +use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; +use serde_json::json; +use sql_query_builder::{Context, SqlQueryBuilder}; + +pub fn main() -> anyhow::Result<()> { + let schema_string = include_str!("./schema.prisma"); + let schema = psl::validate(schema_string.into()); + + if schema.diagnostics.has_errors() { + anyhow::bail!("invalid schema"); + } + + let schema = Arc::new(schema); + let query_schema = Arc::new(query_core::schema::build(schema, true)); + + let connection_info = 
ConnectionInfo::External(ExternalConnectionInfo::new( + SqlFamily::Postgres, + "public".to_owned(), + None, + )); + + // prisma.user.findUnique({ + // where: { + // email: Prisma.Param("userEmail") + // }, + // select: { + // val: true, + // posts: true, + // profile: true, + // } + // }) + let query: JsonSingleQuery = serde_json::from_value(json!({ + "modelName": "User", + "action": "findMany", + "query": { + "arguments": { + "where": { + "email": { + "$type": "Param", + "value": "userEmail" + } + } + }, + "selection": { + "val": true, + "posts": { + "arguments": {}, + "selection": { + "$scalars": true + } + }, + "profile": { + "arguments": {}, + "selection": { + "$scalars": true + } + } + } + } + }))?; + + let request = RequestBody::Json(JsonBody::Single(query)); + let doc = request.into_doc(&query_schema)?; + + let QueryDocument::Single(query) = doc else { + anyhow::bail!("expected single query"); + }; + + let (graph, _serializer) = QueryGraphBuilder::new(&query_schema).build(query)?; + + println!("{graph}"); + + let ctx = Context::new(&connection_info, None); + let builder = SqlQueryBuilder::>::new(ctx); + + let expr = query_compiler::translate(graph, &builder)?; + + println!("{}", expr.pretty_print(true, 80)?); + + Ok(()) +} diff --git a/query-compiler/query-compiler-playground/src/schema.prisma b/query-compiler/query-compiler-playground/src/schema.prisma new file mode 100644 index 000000000000..ff51ae3ddfe0 --- /dev/null +++ b/query-compiler/query-compiler-playground/src/schema.prisma @@ -0,0 +1,33 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "postgresql" + url = "postgresql://postgres:prisma@localhost:5438" +} + +model User { + id String @id @default(cuid()) + email String @unique + name String? + posts Post[] + val Int? + profile Profile? 
+} + +model Profile { + userId String @id + user User @relation(fields: [userId], references: [id]) +} + +model Post { + id String @id @default(cuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + published Boolean + title String + content String? + authorId String? + author User? @relation(fields: [authorId], references: [id]) +} From 7d1763d5451b092581df03167b02531ca69fd051 Mon Sep 17 00:00:00 2001 From: Florian Goessler Date: Mon, 20 Jan 2025 16:57:51 +0100 Subject: [PATCH 65/65] cleanup(compiler): drop example files --- .../examples/compiler.rs | 88 ------------------- .../examples/schema.prisma | 33 ------- 2 files changed, 121 deletions(-) delete mode 100644 query-compiler/query-compiler-playground/examples/compiler.rs delete mode 100644 query-compiler/query-compiler-playground/examples/schema.prisma diff --git a/query-compiler/query-compiler-playground/examples/compiler.rs b/query-compiler/query-compiler-playground/examples/compiler.rs deleted file mode 100644 index b346016c0111..000000000000 --- a/query-compiler/query-compiler-playground/examples/compiler.rs +++ /dev/null @@ -1,88 +0,0 @@ -use std::sync::Arc; - -use quaint::{ - prelude::{ConnectionInfo, ExternalConnectionInfo, SqlFamily}, - visitor::Postgres, -}; -use query_core::{query_graph_builder::QueryGraphBuilder, QueryDocument}; -use request_handlers::{JsonBody, JsonSingleQuery, RequestBody}; -use serde_json::json; -use sql_query_builder::{Context, SqlQueryBuilder}; - -pub fn main() -> anyhow::Result<()> { - let schema_string = include_str!("./schema.prisma"); - let schema = psl::validate(schema_string.into()); - - if schema.diagnostics.has_errors() { - anyhow::bail!("invalid schema"); - } - - let schema = Arc::new(schema); - let query_schema = Arc::new(query_core::schema::build(schema, true)); - - let connection_info = ConnectionInfo::External(ExternalConnectionInfo::new( - SqlFamily::Postgres, - "public".to_owned(), - None, - )); - - // prisma.user.findUnique({ - // where: 
{ - // email: Prisma.Param("userEmail") - // }, - // select: { - // val: true, - // posts: true, - // profile: true, - // } - // }) - let query: JsonSingleQuery = serde_json::from_value(json!({ - "modelName": "User", - "action": "findMany", - "query": { - "arguments": { - "where": { - "email": { - "$type": "Param", - "value": "userEmail" - } - } - }, - "selection": { - "val": true, - "posts": { - "arguments": {}, - "selection": { - "$scalars": true - } - }, - "profile": { - "arguments": {}, - "selection": { - "$scalars": true - } - } - } - } - }))?; - - let request = RequestBody::Json(JsonBody::Single(query)); - let doc = request.into_doc(&query_schema)?; - - let QueryDocument::Single(query) = doc else { - anyhow::bail!("expected single query"); - }; - - let (graph, _serializer) = QueryGraphBuilder::new(&query_schema).build(query)?; - - println!("{graph}"); - - let ctx = Context::new(&connection_info, None); - let builder = SqlQueryBuilder::>::new(ctx); - - let expr = query_compiler::translate(graph, &builder)?; - - println!("{}", expr.pretty_print(true, 80)?); - - Ok(()) -} diff --git a/query-compiler/query-compiler-playground/examples/schema.prisma b/query-compiler/query-compiler-playground/examples/schema.prisma deleted file mode 100644 index ff51ae3ddfe0..000000000000 --- a/query-compiler/query-compiler-playground/examples/schema.prisma +++ /dev/null @@ -1,33 +0,0 @@ -generator client { - provider = "prisma-client-js" -} - -datasource db { - provider = "postgresql" - url = "postgresql://postgres:prisma@localhost:5438" -} - -model User { - id String @id @default(cuid()) - email String @unique - name String? - posts Post[] - val Int? - profile Profile? -} - -model Profile { - userId String @id - user User @relation(fields: [userId], references: [id]) -} - -model Post { - id String @id @default(cuid()) - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt - published Boolean - title String - content String? - authorId String? - author User? 
@relation(fields: [authorId], references: [id]) -}