diff --git a/CHANGELOG.md b/CHANGELOG.md index e8b7cf02..5dcd38c9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,28 @@ This changelog documents the changes between release versions. ## [Unreleased] +### Changed + +- **BREAKING:** Update to ndc-spec v0.2 ([#139](https://github.com/hasura/ndc-mongodb/pull/139)) + +#### ndc-spec v0.2 + +This database connector communicates with the GraphQL Engine using an IR +described by [ndc-spec](https://hasura.github.io/ndc-spec/). Version 0.2 makes +a number of improvements to the spec and enables features that were previously +not possible. Highlights of the new features include: + +- relationships can use a nested object field on the target side as a join key +- grouping result documents and aggregating over groups of documents (pending implementation in the MongoDB connector) +- queries on fields of nested collections (document fields that are arrays of objects) +- filtering on scalar values inside array document fields; previously it was possible to filter on fields of objects inside arrays, but not on scalars (see the sketch below) + +For more details on what has changed in the spec, see [the +changelog](https://hasura.github.io/ndc-spec/specification/changelog.html#020). + +Using the new spec requires a version of GraphQL Engine that supports ndc-spec +v0.2, and it also requires metadata changes. + ## [1.6.0] - 2025-01-17 ### Added diff --git a/Cargo.lock b/Cargo.lock index 9f8de50b..2a33cbdc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -460,7 +460,7 @@ dependencies = [ "async-tempfile", "futures", "googletest", - "itertools", + "itertools 0.13.0", "mongodb", "mongodb-support", "ndc-models", @@ -1523,6 +1523,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -1769,7 +1778,7 @@ dependencies = [ "http 0.2.12", "indent", "indexmap 2.2.6", - "itertools", + "itertools 0.13.0", "lazy_static", "mockall", "mongodb", @@ -1778,6 +1787,7 @@ dependencies = [ "ndc-models", "ndc-query-plan", "ndc-test-helpers", + "nonempty", "once_cell", "pretty_assertions", "proptest", @@ -1805,7 +1815,7 @@ dependencies = [ "futures-util", "googletest", "indexmap 2.2.6", - "itertools", + "itertools 0.13.0", "mongodb", "mongodb-agent-common", "mongodb-support", @@ -1836,7 +1846,7 @@ dependencies = [ "futures", "http 0.2.12", "indexmap 2.2.6", - "itertools", + "itertools 0.13.0", "mongodb", "mongodb-agent-common", "mongodb-support", @@ -1896,8 +1906,8 @@ dependencies = [ [[package]] name = "ndc-models" -version = "0.1.6" -source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.1.6#d1be19e9cdd86ac7b6ad003ff82b7e5b4e96b84f" +version = "0.2.0" +source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.2.0-rc.2#2fad1c699df79890dbb3877d1035ffd8bd0abfc2" dependencies = [ "indexmap 2.2.6", "ref-cast", @@ -1917,7 +1927,7 @@ dependencies = [ "enum-iterator", "indent", "indexmap 2.2.6", - "itertools", + "itertools 0.13.0", "lazy_static", "ndc-models", "ndc-test-helpers", @@ -1930,17 +1940,16 @@ dependencies = [ [[package]] name = "ndc-sdk" -version = "0.4.0" -source = "git+https://github.com/hasura/ndc-sdk-rs.git?tag=v0.4.0#665509f7d3b47ce4f014fc23f817a3599ba13933" +version = "0.5.0" +source = "git+https://github.com/hasura/ndc-sdk-rs.git?rev=643b96b8ee4c8b372b44433167ce2ac4de193332#643b96b8ee4c8b372b44433167ce2ac4de193332" dependencies
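To make the array-filtering item from the changelog entry above concrete, here is a minimal sketch of how such a request is expressed with the `ndc-test-helpers` builder DSL used by the integration tests later in this diff. It assumes only the helpers imported in `crates/integration-tests/src/tests/filtering.rs` (`array_contains`, `target!`, `value!`, `field!`, `query`, `query_request`); the wrapping `main` function is illustrative and not part of the upstream changes.

```rust
use ndc_test_helpers::{array_contains, field, query, query_request, target, value};

fn main() {
    // Sketch: select `title` and `cast` from the `movies` collection, keeping only
    // documents whose `cast` array contains the scalar value "Albert Austin".
    // This is the ndc-spec v0.2 array comparison exercised by the
    // `filters_by_array_comparison_contains` integration test added in this PR.
    let _request = query_request().collection("movies").query(
        query()
            .predicate(array_contains(target!("cast"), value!("Albert Austin")))
            .fields([field!("title"), field!("cast")]),
    );
}
```

The same builder style covers the other highlights; for example, the nested-key relationship join appears later in this diff as `relationship("schools", [("_id", &["departments", "math_department_id"])])` in `local_relationship.rs`.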
= [ "async-trait", "axum", "axum-extra", - "bytes", "clap", "http 0.2.12", - "mime", "ndc-models", + "ndc-sdk-core", "ndc-test", "opentelemetry", "opentelemetry-http", @@ -1950,7 +1959,7 @@ dependencies = [ "opentelemetry_sdk", "prometheus", "reqwest 0.11.27", - "serde", + "semver", "serde_json", "thiserror", "tokio", @@ -1961,10 +1970,30 @@ dependencies = [ "url", ] +[[package]] +name = "ndc-sdk-core" +version = "0.5.0" +source = "git+https://github.com/hasura/ndc-sdk-rs.git?rev=643b96b8ee4c8b372b44433167ce2ac4de193332#643b96b8ee4c8b372b44433167ce2ac4de193332" +dependencies = [ + "async-trait", + "axum", + "bytes", + "http 0.2.12", + "mime", + "ndc-models", + "ndc-test", + "prometheus", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", +] + [[package]] name = "ndc-test" -version = "0.1.6" -source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.1.6#d1be19e9cdd86ac7b6ad003ff82b7e5b4e96b84f" +version = "0.2.0" +source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.2.0-rc.2#2fad1c699df79890dbb3877d1035ffd8bd0abfc2" dependencies = [ "async-trait", "clap", @@ -1972,14 +2001,12 @@ dependencies = [ "indexmap 2.2.6", "ndc-models", "rand", - "reqwest 0.11.27", + "reqwest 0.12.4", "semver", "serde", "serde_json", - "smol_str", "thiserror", "tokio", - "url", ] [[package]] @@ -1987,7 +2014,7 @@ name = "ndc-test-helpers" version = "1.6.0" dependencies = [ "indexmap 2.2.6", - "itertools", + "itertools 0.13.0", "ndc-models", "serde_json", "smol_str", @@ -2005,9 +2032,9 @@ dependencies = [ [[package]] name = "nonempty" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "303e8749c804ccd6ca3b428de7fe0d86cb86bc7606bc15291f100fd487960bb8" +checksum = "549e471b99ccaf2f89101bec68f4d244457d5a95a9c3d0672e9564124397741d" [[package]] name = "nu-ansi-term" @@ -2426,7 +2453,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools", + "itertools 0.12.1", "proc-macro2", "quote", "syn 2.0.66", @@ -2591,7 +2618,6 @@ dependencies = [ "js-sys", "log", "mime", - "mime_guess", "native-tls", "once_cell", "percent-encoding", @@ -2634,6 +2660,7 @@ dependencies = [ "js-sys", "log", "mime", + "mime_guess", "native-tls", "once_cell", "percent-encoding", diff --git a/Cargo.toml b/Cargo.toml index 3b0ea681..0433ae7e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,14 +18,15 @@ resolver = "2" # The tag or rev of ndc-models must match the locked tag or rev of the # ndc-models dependency of ndc-sdk [workspace.dependencies] -ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs.git", tag = "v0.4.0" } -ndc-models = { git = "http://github.com/hasura/ndc-spec.git", tag = "v0.1.6" } +ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs.git", rev = "643b96b8ee4c8b372b44433167ce2ac4de193332" } +ndc-models = { git = "http://github.com/hasura/ndc-spec.git", tag = "v0.2.0-rc.2" } indexmap = { version = "2", features = [ "serde", ] } # should match the version that ndc-models uses -itertools = "^0.12.1" +itertools = "^0.13.0" mongodb = { version = "^3.1.0", features = ["tracing-unstable"] } +nonempty = "^0.11.0" schemars = "^0.8.12" serde = { version = "1", features = ["derive"] } serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] } diff --git a/arion-compose/services/engine.nix b/arion-compose/services/engine.nix index 1d30bc2f..6924506f 100644 --- a/arion-compose/services/engine.nix +++ 
b/arion-compose/services/engine.nix @@ -85,6 +85,7 @@ in useHostStore = true; command = [ "engine" + "--unstable-feature=enable-ndc-v02-support" "--port=${port}" "--metadata-path=${metadata}" "--authn-config-path=${auth-config}" diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 1ecc27c3..3cefa6ab 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -21,7 +21,7 @@ indexmap = { workspace = true } itertools = { workspace = true } ndc-models = { workspace = true } nom = { version = "^7.1.3", optional = true } -nonempty = "^0.10.0" +nonempty = { workspace = true } pretty = { version = "^0.12.3", features = ["termcolor"], optional = true } ref-cast = { workspace = true } regex = "^1.11.1" diff --git a/crates/cli/src/native_query/infer_result_type.rs b/crates/cli/src/native_query/infer_result_type.rs deleted file mode 100644 index eb5c8b02..00000000 --- a/crates/cli/src/native_query/infer_result_type.rs +++ /dev/null @@ -1,475 +0,0 @@ -use std::{collections::BTreeMap, iter::once}; - -use configuration::{ - schema::{ObjectField, ObjectType, Type}, - Configuration, -}; -use mongodb::bson::{Bson, Document}; -use mongodb_support::{ - aggregate::{Accumulator, Pipeline, Stage}, - BsonScalarType, -}; -use ndc_models::{CollectionName, FieldName, ObjectTypeName}; - -use crate::introspection::{sampling::make_object_type, type_unification::unify_object_types}; - -use super::{ - aggregation_expression::{ - self, infer_type_from_aggregation_expression, infer_type_from_reference_shorthand, - }, - error::{Error, Result}, - helpers::find_collection_object_type, - pipeline_type_context::{PipelineTypeContext, PipelineTypes}, - reference_shorthand::{parse_reference_shorthand, Reference}, -}; - -type ObjectTypes = BTreeMap; - -pub fn infer_result_type( - configuration: &Configuration, - // If we have to define a new object type, use this name - desired_object_type_name: &str, - input_collection: Option<&CollectionName>, - pipeline: &Pipeline, -) -> Result { - let collection_doc_type = input_collection - .map(|collection_name| find_collection_object_type(configuration, collection_name)) - .transpose()?; - let mut stages = pipeline.iter().enumerate(); - let mut context = PipelineTypeContext::new(configuration, collection_doc_type); - match stages.next() { - Some((stage_index, stage)) => infer_result_type_helper( - &mut context, - desired_object_type_name, - stage_index, - stage, - stages, - ), - None => Err(Error::EmptyPipeline), - }?; - context.try_into() -} - -pub fn infer_result_type_helper<'a, 'b>( - context: &mut PipelineTypeContext<'a>, - desired_object_type_name: &str, - stage_index: usize, - stage: &Stage, - mut rest: impl Iterator, -) -> Result<()> { - match stage { - Stage::Documents(docs) => { - let document_type_name = - context.unique_type_name(&format!("{desired_object_type_name}_documents")); - let new_object_types = infer_type_from_documents(&document_type_name, docs); - context.set_stage_doc_type(document_type_name, new_object_types); - } - Stage::Match(_) => (), - Stage::Sort(_) => (), - Stage::Limit(_) => (), - Stage::Lookup { .. 
} => todo!("lookup stage"), - Stage::Skip(_) => (), - Stage::Group { - key_expression, - accumulators, - } => { - let object_type_name = infer_type_from_group_stage( - context, - desired_object_type_name, - key_expression, - accumulators, - )?; - context.set_stage_doc_type(object_type_name, Default::default()) - } - Stage::Facet(_) => todo!("facet stage"), - Stage::Count(_) => todo!("count stage"), - Stage::ReplaceWith(selection) => { - let selection: &Document = selection.into(); - let result_type = aggregation_expression::infer_type_from_aggregation_expression( - context, - desired_object_type_name, - selection.clone().into(), - )?; - match result_type { - Type::Object(object_type_name) => { - context.set_stage_doc_type(object_type_name.into(), Default::default()); - } - t => Err(Error::ExpectedObject { actual_type: t })?, - } - } - Stage::Unwind { - path, - include_array_index, - preserve_null_and_empty_arrays, - } => { - let result_type = infer_type_from_unwind_stage( - context, - desired_object_type_name, - path, - include_array_index.as_deref(), - *preserve_null_and_empty_arrays, - )?; - context.set_stage_doc_type(result_type, Default::default()) - } - Stage::Other(doc) => { - let warning = Error::UnknownAggregationStage { - stage_index, - stage: doc.clone(), - }; - context.set_unknown_stage_doc_type(warning); - } - }; - match rest.next() { - Some((next_stage_index, next_stage)) => infer_result_type_helper( - context, - desired_object_type_name, - next_stage_index, - next_stage, - rest, - ), - None => Ok(()), - } -} - -pub fn infer_type_from_documents( - object_type_name: &ObjectTypeName, - documents: &[Document], -) -> ObjectTypes { - let mut collected_object_types = vec![]; - for document in documents { - let object_types = make_object_type(object_type_name, document, false, false); - collected_object_types = if collected_object_types.is_empty() { - object_types - } else { - unify_object_types(collected_object_types, object_types) - }; - } - collected_object_types - .into_iter() - .map(|type_with_name| (type_with_name.name, type_with_name.value)) - .collect() -} - -fn infer_type_from_group_stage( - context: &mut PipelineTypeContext<'_>, - desired_object_type_name: &str, - key_expression: &Bson, - accumulators: &BTreeMap, -) -> Result { - let group_key_expression_type = infer_type_from_aggregation_expression( - context, - &format!("{desired_object_type_name}_id"), - key_expression.clone(), - )?; - - let group_expression_field: (FieldName, ObjectField) = ( - "_id".into(), - ObjectField { - r#type: group_key_expression_type.clone(), - description: None, - }, - ); - let accumulator_fields = accumulators.iter().map(|(key, accumulator)| { - let accumulator_type = match accumulator { - Accumulator::Count => Type::Scalar(BsonScalarType::Int), - Accumulator::Min(expr) => infer_type_from_aggregation_expression( - context, - &format!("{desired_object_type_name}_min"), - expr.clone(), - )?, - Accumulator::Max(expr) => infer_type_from_aggregation_expression( - context, - &format!("{desired_object_type_name}_min"), - expr.clone(), - )?, - Accumulator::Push(expr) => { - let t = infer_type_from_aggregation_expression( - context, - &format!("{desired_object_type_name}_push"), - expr.clone(), - )?; - Type::ArrayOf(Box::new(t)) - } - Accumulator::Avg(expr) => { - let t = infer_type_from_aggregation_expression( - context, - &format!("{desired_object_type_name}_avg"), - expr.clone(), - )?; - match t { - Type::ExtendedJSON => t, - Type::Scalar(scalar_type) if scalar_type.is_numeric() => t, - _ => 
Type::Nullable(Box::new(Type::Scalar(BsonScalarType::Int))), - } - } - Accumulator::Sum(expr) => { - let t = infer_type_from_aggregation_expression( - context, - &format!("{desired_object_type_name}_push"), - expr.clone(), - )?; - match t { - Type::ExtendedJSON => t, - Type::Scalar(scalar_type) if scalar_type.is_numeric() => t, - _ => Type::Scalar(BsonScalarType::Int), - } - } - }; - Ok::<_, Error>(( - key.clone().into(), - ObjectField { - r#type: accumulator_type, - description: None, - }, - )) - }); - let fields = once(Ok(group_expression_field)) - .chain(accumulator_fields) - .collect::>()?; - - let object_type = ObjectType { - fields, - description: None, - }; - let object_type_name = context.unique_type_name(desired_object_type_name); - context.insert_object_type(object_type_name.clone(), object_type); - Ok(object_type_name) -} - -fn infer_type_from_unwind_stage( - context: &mut PipelineTypeContext<'_>, - desired_object_type_name: &str, - path: &str, - include_array_index: Option<&str>, - _preserve_null_and_empty_arrays: Option, -) -> Result { - let field_to_unwind = parse_reference_shorthand(path)?; - let Reference::InputDocumentField { name, nested_path } = field_to_unwind else { - return Err(Error::ExpectedStringPath(path.into())); - }; - - let field_type = infer_type_from_reference_shorthand(context, path)?; - let Type::ArrayOf(field_element_type) = field_type else { - return Err(Error::ExpectedArrayReference { - reference: path.into(), - referenced_type: field_type, - }); - }; - - let nested_path_iter = nested_path.into_iter(); - - let mut doc_type = context.get_input_document_type()?.into_owned(); - if let Some(index_field_name) = include_array_index { - doc_type.fields.insert( - index_field_name.into(), - ObjectField { - r#type: Type::Scalar(BsonScalarType::Long), - description: Some(format!("index of unwound array elements in {name}")), - }, - ); - } - - // If `path` includes a nested_path then the type for the unwound field will be nested - // objects - fn build_nested_types( - context: &mut PipelineTypeContext<'_>, - ultimate_field_type: Type, - parent_object_type: &mut ObjectType, - desired_object_type_name: &str, - field_name: FieldName, - mut rest: impl Iterator, - ) { - match rest.next() { - Some(next_field_name) => { - let object_type_name = context.unique_type_name(desired_object_type_name); - let mut object_type = ObjectType { - fields: Default::default(), - description: None, - }; - build_nested_types( - context, - ultimate_field_type, - &mut object_type, - &format!("{desired_object_type_name}_{next_field_name}"), - next_field_name, - rest, - ); - context.insert_object_type(object_type_name.clone(), object_type); - parent_object_type.fields.insert( - field_name, - ObjectField { - r#type: Type::Object(object_type_name.into()), - description: None, - }, - ); - } - None => { - parent_object_type.fields.insert( - field_name, - ObjectField { - r#type: ultimate_field_type, - description: None, - }, - ); - } - } - } - build_nested_types( - context, - *field_element_type, - &mut doc_type, - desired_object_type_name, - name, - nested_path_iter, - ); - - let object_type_name = context.unique_type_name(desired_object_type_name); - context.insert_object_type(object_type_name.clone(), doc_type); - - Ok(object_type_name) -} - -#[cfg(test)] -mod tests { - use configuration::schema::{ObjectField, ObjectType, Type}; - use mongodb::bson::doc; - use mongodb_support::{ - aggregate::{Pipeline, Selection, Stage}, - BsonScalarType, - }; - use pretty_assertions::assert_eq; - use 
test_helpers::configuration::mflix_config; - - use crate::native_query::pipeline_type_context::PipelineTypeContext; - - use super::{infer_result_type, infer_type_from_unwind_stage}; - - type Result = anyhow::Result; - - #[test] - fn infers_type_from_documents_stage() -> Result<()> { - let pipeline = Pipeline::new(vec![Stage::Documents(vec![ - doc! { "foo": 1 }, - doc! { "bar": 2 }, - ])]); - let config = mflix_config(); - let pipeline_types = infer_result_type(&config, "documents", None, &pipeline).unwrap(); - let expected = [( - "documents_documents".into(), - ObjectType { - fields: [ - ( - "foo".into(), - ObjectField { - r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::Int))), - description: None, - }, - ), - ( - "bar".into(), - ObjectField { - r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::Int))), - description: None, - }, - ), - ] - .into(), - description: None, - }, - )] - .into(); - let actual = pipeline_types.object_types; - assert_eq!(actual, expected); - Ok(()) - } - - #[test] - fn infers_type_from_replace_with_stage() -> Result<()> { - let pipeline = Pipeline::new(vec![Stage::ReplaceWith(Selection::new(doc! { - "selected_title": "$title" - }))]); - let config = mflix_config(); - let pipeline_types = infer_result_type( - &config, - "movies_selection", - Some(&("movies".into())), - &pipeline, - ) - .unwrap(); - let expected = [( - "movies_selection".into(), - ObjectType { - fields: [( - "selected_title".into(), - ObjectField { - r#type: Type::Scalar(BsonScalarType::String), - description: None, - }, - )] - .into(), - description: None, - }, - )] - .into(); - let actual = pipeline_types.object_types; - assert_eq!(actual, expected); - Ok(()) - } - - #[test] - fn infers_type_from_unwind_stage() -> Result<()> { - let config = mflix_config(); - let mut context = PipelineTypeContext::new(&config, None); - context.insert_object_type( - "words_doc".into(), - ObjectType { - fields: [( - "words".into(), - ObjectField { - r#type: Type::ArrayOf(Box::new(Type::Scalar(BsonScalarType::String))), - description: None, - }, - )] - .into(), - description: None, - }, - ); - context.set_stage_doc_type("words_doc".into(), Default::default()); - - let inferred_type_name = infer_type_from_unwind_stage( - &mut context, - "unwind_stage", - "$words", - Some("idx"), - Some(false), - )?; - - assert_eq!( - context - .get_object_type(&inferred_type_name) - .unwrap() - .into_owned(), - ObjectType { - fields: [ - ( - "words".into(), - ObjectField { - r#type: Type::Scalar(BsonScalarType::String), - description: None, - } - ), - ( - "idx".into(), - ObjectField { - r#type: Type::Scalar(BsonScalarType::Long), - description: Some("index of unwound array elements in words".into()), - } - ), - ] - .into(), - description: None, - } - ); - Ok(()) - } -} diff --git a/crates/cli/src/native_query/pipeline/mod.rs b/crates/cli/src/native_query/pipeline/mod.rs index acc80046..12e2b347 100644 --- a/crates/cli/src/native_query/pipeline/mod.rs +++ b/crates/cli/src/native_query/pipeline/mod.rs @@ -341,7 +341,7 @@ mod tests { aggregate::{Pipeline, Selection, Stage}, BsonScalarType, }; - use nonempty::nonempty; + use nonempty::NonEmpty; use pretty_assertions::assert_eq; use test_helpers::configuration::mflix_config; @@ -462,7 +462,7 @@ mod tests { Some(TypeConstraint::ElementOf(Box::new( TypeConstraint::FieldOf { target_type: Box::new(TypeConstraint::Variable(input_doc_variable)), - path: nonempty!["words".into()], + path: NonEmpty::singleton("words".into()), } ))) ) diff --git 
a/crates/cli/src/native_query/pipeline/project_stage.rs b/crates/cli/src/native_query/pipeline/project_stage.rs index 05bdea41..427d9c55 100644 --- a/crates/cli/src/native_query/pipeline/project_stage.rs +++ b/crates/cli/src/native_query/pipeline/project_stage.rs @@ -7,7 +7,7 @@ use itertools::Itertools as _; use mongodb::bson::{Bson, Decimal128, Document}; use mongodb_support::BsonScalarType; use ndc_models::{FieldName, ObjectTypeName}; -use nonempty::{nonempty, NonEmpty}; +use nonempty::NonEmpty; use crate::native_query::{ aggregation_expression::infer_type_from_aggregation_expression, @@ -89,7 +89,7 @@ fn projection_tree_into_field_overrides( ProjectionTree::Object(sub_specs) => { let original_field_type = TypeConstraint::FieldOf { target_type: Box::new(input_type.clone()), - path: nonempty![name.clone()], + path: NonEmpty::singleton(name.clone()), }; Some(projection_tree_into_field_overrides( original_field_type, @@ -265,7 +265,7 @@ fn path_collision_error(path: impl IntoIterator) mod tests { use mongodb::bson::doc; use mongodb_support::BsonScalarType; - use nonempty::nonempty; + use nonempty::{nonempty, NonEmpty}; use pretty_assertions::assert_eq; use test_helpers::configuration::mflix_config; @@ -310,7 +310,7 @@ mod tests { "title".into(), TypeConstraint::FieldOf { target_type: Box::new(input_type.clone()), - path: nonempty!["title".into()], + path: NonEmpty::singleton("title".into()), }, ), ( @@ -321,7 +321,7 @@ mod tests { "releaseDate".into(), TypeConstraint::FieldOf { target_type: Box::new(input_type.clone()), - path: nonempty!["released".into()], + path: NonEmpty::singleton("released".into()), }, ), ] @@ -410,7 +410,7 @@ mod tests { augmented_object_type_name: "Movie_project_tomatoes".into(), target_type: Box::new(TypeConstraint::FieldOf { target_type: Box::new(input_type.clone()), - path: nonempty!["tomatoes".into()], + path: NonEmpty::singleton("tomatoes".into()), }), fields: [ ("lastUpdated".into(), None), @@ -422,9 +422,9 @@ mod tests { target_type: Box::new(TypeConstraint::FieldOf { target_type: Box::new(TypeConstraint::FieldOf { target_type: Box::new(input_type.clone()), - path: nonempty!["tomatoes".into()], + path: NonEmpty::singleton("tomatoes".into()), }), - path: nonempty!["critic".into()], + path: NonEmpty::singleton("critic".into()), }), fields: [("rating".into(), None), ("meter".into(), None),] .into(), diff --git a/crates/cli/src/native_query/type_solver/mod.rs b/crates/cli/src/native_query/type_solver/mod.rs index bc7a8f38..5c40a9cc 100644 --- a/crates/cli/src/native_query/type_solver/mod.rs +++ b/crates/cli/src/native_query/type_solver/mod.rs @@ -147,7 +147,7 @@ mod tests { use anyhow::Result; use configuration::schema::{ObjectField, ObjectType, Type}; use mongodb_support::BsonScalarType; - use nonempty::nonempty; + use nonempty::NonEmpty; use pretty_assertions::assert_eq; use test_helpers::configuration::mflix_config; @@ -252,7 +252,7 @@ mod tests { "selected_title".into(), TypeConstraint::FieldOf { target_type: Box::new(TypeConstraint::Variable(var0)), - path: nonempty!["title".into()], + path: NonEmpty::singleton("title".into()), }, )] .into(), diff --git a/crates/cli/src/native_query/type_solver/simplify.rs b/crates/cli/src/native_query/type_solver/simplify.rs index be8cc41d..9187dba0 100644 --- a/crates/cli/src/native_query/type_solver/simplify.rs +++ b/crates/cli/src/native_query/type_solver/simplify.rs @@ -530,7 +530,7 @@ mod tests { use googletest::prelude::*; use mongodb_support::BsonScalarType; - use nonempty::nonempty; + use nonempty::NonEmpty; use 
test_helpers::configuration::mflix_config; use crate::native_query::{ @@ -592,7 +592,7 @@ mod tests { Some(TypeVariable::new(1, Variance::Covariant)), [TypeConstraint::FieldOf { target_type: Box::new(TypeConstraint::Object("movies".into())), - path: nonempty!["title".into()], + path: NonEmpty::singleton("title".into()), }], ); expect_that!( diff --git a/crates/configuration/src/configuration.rs b/crates/configuration/src/configuration.rs index 729b680b..ffb93863 100644 --- a/crates/configuration/src/configuration.rs +++ b/crates/configuration/src/configuration.rs @@ -276,7 +276,6 @@ fn collection_to_collection_info( collection_type: collection.r#type, description: collection.description, arguments: Default::default(), - foreign_keys: Default::default(), uniqueness_constraints: BTreeMap::from_iter(pk_constraint), } } @@ -298,7 +297,6 @@ fn native_query_to_collection_info( collection_type: native_query.result_document_type.clone(), description: native_query.description.clone(), arguments: arguments_to_ndc_arguments(native_query.arguments.clone()), - foreign_keys: Default::default(), uniqueness_constraints: BTreeMap::from_iter(pk_constraint), } } diff --git a/crates/configuration/src/mongo_scalar_type.rs b/crates/configuration/src/mongo_scalar_type.rs index 9641ce9f..1876c260 100644 --- a/crates/configuration/src/mongo_scalar_type.rs +++ b/crates/configuration/src/mongo_scalar_type.rs @@ -20,6 +20,12 @@ impl MongoScalarType { } } +impl From for MongoScalarType { + fn from(value: BsonScalarType) -> Self { + Self::Bson(value) + } +} + impl TryFrom<&ndc_models::ScalarTypeName> for MongoScalarType { type Error = QueryPlanError; diff --git a/crates/configuration/src/schema/mod.rs b/crates/configuration/src/schema/mod.rs index 3b43e173..1c46e192 100644 --- a/crates/configuration/src/schema/mod.rs +++ b/crates/configuration/src/schema/mod.rs @@ -185,6 +185,7 @@ impl From for ndc_models::ObjectType { .into_iter() .map(|(name, field)| (name, field.into())) .collect(), + foreign_keys: Default::default(), } } } diff --git a/crates/integration-tests/src/tests/expressions.rs b/crates/integration-tests/src/tests/expressions.rs index ff527bd3..584cbd69 100644 --- a/crates/integration-tests/src/tests/expressions.rs +++ b/crates/integration-tests/src/tests/expressions.rs @@ -61,6 +61,7 @@ async fn evaluates_exists_with_predicate() -> anyhow::Result<()> { query() .predicate(exists( ExistsInCollection::Related { + field_path: Default::default(), relationship: "albums".into(), arguments: Default::default(), }, @@ -74,7 +75,10 @@ async fn evaluates_exists_with_predicate() -> anyhow::Result<()> { ]).order_by([asc!("Title")])) ]), ) - .relationships([("albums", relationship("Album", [("ArtistId", "ArtistId")]))]) + .relationships([( + "albums", + relationship("Album", [("ArtistId", &["ArtistId"])]) + )]) ) .await? 
); diff --git a/crates/integration-tests/src/tests/filtering.rs b/crates/integration-tests/src/tests/filtering.rs index d0f68a68..27501987 100644 --- a/crates/integration-tests/src/tests/filtering.rs +++ b/crates/integration-tests/src/tests/filtering.rs @@ -1,5 +1,7 @@ use insta::assert_yaml_snapshot; -use ndc_test_helpers::{binop, field, query, query_request, target, variable}; +use ndc_test_helpers::{ + array_contains, binop, field, is_empty, query, query_request, target, value, variable, +}; use crate::{connector::Connector, graphql_query, run_connector_query}; @@ -67,21 +69,53 @@ async fn filters_by_comparisons_on_elements_of_array_field() -> anyhow::Result<( } #[tokio::test] -async fn filters_by_comparisons_on_elements_of_array_of_scalars_against_variable( -) -> anyhow::Result<()> { +async fn filters_by_comparison_with_a_variable() -> anyhow::Result<()> { assert_yaml_snapshot!( run_connector_query( Connector::SampleMflix, query_request() - .variables([[("cast_member", "Albert Austin")]]) + .variables([[("title", "The Blue Bird")]]) .collection("movies") .query( query() - .predicate(binop("_eq", target!("cast"), variable!(cast_member))) - .fields([field!("title"), field!("cast")]), + .predicate(binop("_eq", target!("title"), variable!(title))) + .fields([field!("title")]), ) ) .await? ); Ok(()) } + +#[tokio::test] +async fn filters_by_array_comparison_contains() -> anyhow::Result<()> { + assert_yaml_snapshot!( + run_connector_query( + Connector::SampleMflix, + query_request().collection("movies").query( + query() + .predicate(array_contains(target!("cast"), value!("Albert Austin"))) + .fields([field!("title"), field!("cast")]), + ) + ) + .await? + ); + Ok(()) +} + +#[tokio::test] +async fn filters_by_array_comparison_is_empty() -> anyhow::Result<()> { + assert_yaml_snapshot!( + run_connector_query( + Connector::SampleMflix, + query_request().collection("movies").query( + query() + .predicate(is_empty(target!("writers"))) + .fields([field!("writers")]) + .limit(1), + ) + ) + .await? + ); + Ok(()) +} diff --git a/crates/integration-tests/src/tests/local_relationship.rs b/crates/integration-tests/src/tests/local_relationship.rs index a9997d04..5906d8eb 100644 --- a/crates/integration-tests/src/tests/local_relationship.rs +++ b/crates/integration-tests/src/tests/local_relationship.rs @@ -1,6 +1,9 @@ use crate::{connector::Connector, graphql_query, run_connector_query}; use insta::assert_yaml_snapshot; -use ndc_test_helpers::{asc, field, query, query_request, relation_field, relationship}; +use ndc_test_helpers::{ + asc, binop, exists, field, query, query_request, related, relation_field, + relationship, target, value, +}; #[tokio::test] async fn joins_local_relationships() -> anyhow::Result<()> { @@ -203,7 +206,37 @@ async fn joins_on_field_names_that_require_escaping() -> anyhow::Result<()> { ) .relationships([( "join", - relationship("weird_field_names", [("$invalid.name", "$invalid.name")]) + relationship("weird_field_names", [("$invalid.name", &["$invalid.name"])]) + )]) + ) + .await? 
+ ); + Ok(()) +} + +#[tokio::test] +async fn joins_relationships_on_nested_key() -> anyhow::Result<()> { + assert_yaml_snapshot!( + run_connector_query( + Connector::TestCases, + query_request() + .collection("departments") + .query( + query() + .predicate(exists( + related!("schools_departments"), + binop("_eq", target!("name"), value!("West Valley")) + )) + .fields([ + relation_field!("departments" => "schools_departments", query().fields([ + field!("name") + ])) + ]) + .order_by([asc!("_id")]) + ) + .relationships([( + "schools_departments", + relationship("schools", [("_id", &["departments", "math_department_id"])]) )]) ) .await? diff --git a/crates/integration-tests/src/tests/mod.rs b/crates/integration-tests/src/tests/mod.rs index 1956d231..de65332f 100644 --- a/crates/integration-tests/src/tests/mod.rs +++ b/crates/integration-tests/src/tests/mod.rs @@ -14,6 +14,7 @@ mod filtering; mod local_relationship; mod native_mutation; mod native_query; +mod nested_collection; mod permissions; mod remote_relationship; mod sorting; diff --git a/crates/integration-tests/src/tests/nested_collection.rs b/crates/integration-tests/src/tests/nested_collection.rs new file mode 100644 index 00000000..eee65140 --- /dev/null +++ b/crates/integration-tests/src/tests/nested_collection.rs @@ -0,0 +1,28 @@ +use crate::{connector::Connector, run_connector_query}; +use insta::assert_yaml_snapshot; +use ndc_test_helpers::{ + array, asc, binop, exists, exists_in_nested, field, object, query, query_request, target, value, +}; + +#[tokio::test] +async fn exists_in_nested_collection() -> anyhow::Result<()> { + assert_yaml_snapshot!( + run_connector_query( + Connector::TestCases, + query_request().collection("nested_collection").query( + query() + .predicate(exists( + exists_in_nested("staff"), + binop("_eq", target!("name"), value!("Alyx")) + )) + .fields([ + field!("institution"), + field!("staff" => "staff", array!(object!([field!("name")]))), + ]) + .order_by([asc!("_id")]) + ) + ) + .await? + ); + Ok(()) +} diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__aggregation__runs_aggregation_over_top_level_fields.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__aggregation__runs_aggregation_over_top_level_fields.snap index b3a603b1..3fb73855 100644 --- a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__aggregation__runs_aggregation_over_top_level_fields.snap +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__aggregation__runs_aggregation_over_top_level_fields.snap @@ -26,7 +26,7 @@ data: avg: 333925.875 max: 436453 min: 221701 - sum: 2671407 + sum: "2671407" unitPrice: _count: 8 _count_distinct: 1 diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_array_comparison_contains.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_array_comparison_contains.snap new file mode 100644 index 00000000..43711a77 --- /dev/null +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_array_comparison_contains.snap @@ -0,0 +1,11 @@ +--- +source: crates/integration-tests/src/tests/filtering.rs +expression: "run_connector_query(Connector::SampleMflix,\nquery_request().collection(\"movies\").query(query().predicate(array_contains(target!(\"cast\"),\nvalue!(\"Albert Austin\"))).fields([field!(\"title\"), field!(\"cast\")]),)).await?" 
+--- +- rows: + - cast: + - Charles Chaplin + - Edna Purviance + - Eric Campbell + - Albert Austin + title: The Immigrant diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_array_comparison_is_empty.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_array_comparison_is_empty.snap new file mode 100644 index 00000000..5285af75 --- /dev/null +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_array_comparison_is_empty.snap @@ -0,0 +1,6 @@ +--- +source: crates/integration-tests/src/tests/filtering.rs +expression: "run_connector_query(Connector::SampleMflix,\nquery_request().collection(\"movies\").query(query().predicate(is_empty(target!(\"writers\"))).fields([field!(\"writers\")]).limit(1),)).await?" +--- +- rows: + - writers: [] diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_comparison_with_a_variable.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_comparison_with_a_variable.snap new file mode 100644 index 00000000..d2b39ddc --- /dev/null +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_comparison_with_a_variable.snap @@ -0,0 +1,6 @@ +--- +source: crates/integration-tests/src/tests/filtering.rs +expression: "run_connector_query(Connector::SampleMflix,\nquery_request().variables([[(\"title\",\n\"The Blue Bird\")]]).collection(\"movies\").query(query().predicate(binop(\"_eq\",\ntarget!(\"title\"), variable!(title))).fields([field!(\"title\")]),)).await?" +--- +- rows: + - title: The Blue Bird diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_comparisons_on_elements_of_array_of_scalars_against_variable.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_comparisons_on_elements_of_array_of_scalars_against_variable.snap deleted file mode 100644 index 46425908..00000000 --- a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_comparisons_on_elements_of_array_of_scalars_against_variable.snap +++ /dev/null @@ -1,11 +0,0 @@ ---- -source: crates/integration-tests/src/tests/filtering.rs -expression: "run_connector_query(Connector::SampleMflix,\n query_request().variables([[(\"cast_member\",\n \"Albert Austin\")]]).collection(\"movies\").query(query().predicate(binop(\"_eq\",\n target!(\"cast\"),\n variable!(cast_member))).fields([field!(\"title\"),\n field!(\"cast\")]))).await?" 
---- -- rows: - - cast: - - Charles Chaplin - - Edna Purviance - - Eric Campbell - - Albert Austin - title: The Immigrant diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__local_relationship__joins_relationships_on_nested_key.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__local_relationship__joins_relationships_on_nested_key.snap new file mode 100644 index 00000000..2200e9e1 --- /dev/null +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__local_relationship__joins_relationships_on_nested_key.snap @@ -0,0 +1,8 @@ +--- +source: crates/integration-tests/src/tests/local_relationship.rs +expression: "run_connector_query(Connector::TestCases,\nquery_request().collection(\"departments\").query(query().predicate(exists(related!(\"schools_departments\"),\nbinop(\"_eq\", target!(\"name\"),\nvalue!(\"West Valley\")))).fields([relation_field!(\"departments\" =>\n\"schools_departments\",\nquery().fields([field!(\"name\")]))]).order_by([asc!(\"_id\")])).relationships([(\"schools_departments\",\nrelationship(\"schools\",\n[(\"_id\", &[\"departments\", \"math_department_id\"])]))])).await?" +--- +- rows: + - departments: + rows: + - name: West Valley diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__nested_collection__exists_in_nested_collection.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__nested_collection__exists_in_nested_collection.snap new file mode 100644 index 00000000..5283509a --- /dev/null +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__nested_collection__exists_in_nested_collection.snap @@ -0,0 +1,10 @@ +--- +source: crates/integration-tests/src/tests/nested_collection.rs +expression: "run_connector_query(Connector::TestCases,\nquery_request().collection(\"nested_collection\").query(query().predicate(exists(nested(\"staff\"),\nbinop(\"_eq\", target!(\"name\"),\nvalue!(\"Alyx\")))).fields([field!(\"institution\"),\nfield!(\"staff\" => \"staff\",\narray!(object!([field!(\"name\")]))),]).order_by([asc!(\"_id\")]))).await?" +--- +- rows: + - institution: City 17 + staff: + - name: Alyx + - name: Freeman + - name: Breen diff --git a/crates/mongodb-agent-common/Cargo.toml b/crates/mongodb-agent-common/Cargo.toml index 52511d7e..639d00ef 100644 --- a/crates/mongodb-agent-common/Cargo.toml +++ b/crates/mongodb-agent-common/Cargo.toml @@ -28,6 +28,7 @@ lazy_static = "^1.4.0" mockall = { version = "^0.13.1", optional = true } mongodb = { workspace = true } ndc-models = { workspace = true } +nonempty = { workspace = true } once_cell = "1" pretty_assertions = { version = "1", optional = true } regex = "1" diff --git a/crates/mongodb-agent-common/src/comparison_function.rs b/crates/mongodb-agent-common/src/comparison_function.rs index 842df44e..5ed5ca82 100644 --- a/crates/mongodb-agent-common/src/comparison_function.rs +++ b/crates/mongodb-agent-common/src/comparison_function.rs @@ -1,14 +1,12 @@ use enum_iterator::{all, Sequence}; use mongodb::bson::{doc, Bson, Document}; +use ndc_models as ndc; /// Supported binary comparison operators. This type provides GraphQL names, MongoDB operator /// names, and aggregation pipeline code for each operator. Argument types are defined in /// mongodb-agent-common/src/scalar_types_capabilities.rs. 
#[derive(Copy, Clone, Debug, PartialEq, Eq, Sequence)] pub enum ComparisonFunction { - // Equality and inequality operators (except for `NotEqual`) are built into the v2 spec, but - // the only built-in operator in v3 is `Equal`. So we need at minimum definitions for - // inequality operators here. LessThan, LessThanOrEqual, GreaterThan, @@ -58,6 +56,33 @@ impl ComparisonFunction { } } + pub fn ndc_definition( + self, + argument_type: impl FnOnce(Self) -> ndc::Type, + ) -> ndc::ComparisonOperatorDefinition { + use ndc::ComparisonOperatorDefinition as NDC; + match self { + C::Equal => NDC::Equal, + C::In => NDC::In, + C::LessThan => NDC::LessThan, + C::LessThanOrEqual => NDC::LessThanOrEqual, + C::GreaterThan => NDC::GreaterThan, + C::GreaterThanOrEqual => NDC::GreaterThanOrEqual, + C::NotEqual => NDC::Custom { + argument_type: argument_type(self), + }, + C::NotIn => NDC::Custom { + argument_type: argument_type(self), + }, + C::Regex => NDC::Custom { + argument_type: argument_type(self), + }, + C::IRegex => NDC::Custom { + argument_type: argument_type(self), + }, + } + } + pub fn from_graphql_name(s: &str) -> Result { all::() .find(|variant| variant.graphql_name() == s) diff --git a/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs b/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs index f3312356..8c6e128e 100644 --- a/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs +++ b/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs @@ -3,7 +3,7 @@ use std::collections::BTreeMap; use configuration::{ native_mutation::NativeMutation, native_query::NativeQuery, Configuration, MongoScalarType, }; -use mongodb_support::{ExtendedJsonMode, EXTENDED_JSON_TYPE_NAME}; +use mongodb_support::{BsonScalarType, ExtendedJsonMode, EXTENDED_JSON_TYPE_NAME}; use ndc_models as ndc; use ndc_query_plan::{ConnectorTypes, QueryContext, QueryPlanError}; @@ -32,6 +32,14 @@ impl ConnectorTypes for MongoConfiguration { type AggregateFunction = AggregationFunction; type ComparisonOperator = ComparisonFunction; type ScalarType = MongoScalarType; + + fn count_aggregate_type() -> ndc_query_plan::Type { + ndc_query_plan::Type::scalar(BsonScalarType::Int) + } + + fn string_type() -> ndc_query_plan::Type { + ndc_query_plan::Type::scalar(BsonScalarType::String) + } } impl QueryContext for MongoConfiguration { @@ -102,6 +110,7 @@ fn scalar_type_name(t: &Type) -> Option<&'static str> { pub type Aggregate = ndc_query_plan::Aggregate; pub type Argument = ndc_query_plan::Argument; pub type Arguments = ndc_query_plan::Arguments; +pub type ArrayComparison = ndc_query_plan::ArrayComparison; pub type ComparisonTarget = ndc_query_plan::ComparisonTarget; pub type ComparisonValue = ndc_query_plan::ComparisonValue; pub type ExistsInCollection = ndc_query_plan::ExistsInCollection; @@ -113,6 +122,7 @@ pub type MutationProcedureArgument = ndc_query_plan::MutationProcedureArgument; pub type NestedArray = ndc_query_plan::NestedArray; pub type NestedObject = ndc_query_plan::NestedObject; +pub type ObjectField = ndc_query_plan::ObjectField; pub type ObjectType = ndc_query_plan::ObjectType; pub type OrderBy = ndc_query_plan::OrderBy; pub type OrderByTarget = ndc_query_plan::OrderByTarget; diff --git a/crates/mongodb-agent-common/src/mongodb/selection.rs b/crates/mongodb-agent-common/src/mongodb/selection.rs index 614594c1..fbc3f0bf 100644 --- a/crates/mongodb-agent-common/src/mongodb/selection.rs +++ b/crates/mongodb-agent-common/src/mongodb/selection.rs @@ -2,6 +2,7 @@ use indexmap::IndexMap; use mongodb::bson::{doc, Bson, 
Document}; use mongodb_support::aggregate::Selection; use ndc_models::FieldName; +use nonempty::NonEmpty; use crate::{ interface_types::MongoAgentError, @@ -52,7 +53,7 @@ fn selection_for_field( .. } => { let col_ref = nested_column_reference(parent, column); - let col_ref_or_null = value_or_null(col_ref.into_aggregate_expression()); + let col_ref_or_null = value_or_null(col_ref.into_aggregate_expression().into_bson()); Ok(col_ref_or_null) } Field::Column { @@ -90,7 +91,8 @@ fn selection_for_field( field_name.to_string(), ColumnRef::variable("this") .into_nested_field(field_name) - .into_aggregate_expression(), + .into_aggregate_expression() + .into_bson(), ) }) .collect() @@ -171,7 +173,7 @@ fn nested_column_reference<'a>( ) -> ColumnRef<'a> { match parent { Some(parent) => parent.into_nested_field(column), - None => ColumnRef::from_field_path([column]), + None => ColumnRef::from_field_path(NonEmpty::singleton(column)), } } @@ -296,7 +298,7 @@ mod tests { ])) .relationships([( "class_students", - relationship("students", [("_id", "classId")]), + relationship("students", [("_id", &["classId"])]), )]) .into(); diff --git a/crates/mongodb-agent-common/src/procedure/interpolated_command.rs b/crates/mongodb-agent-common/src/procedure/interpolated_command.rs index ac6775a3..131cee38 100644 --- a/crates/mongodb-agent-common/src/procedure/interpolated_command.rs +++ b/crates/mongodb-agent-common/src/procedure/interpolated_command.rs @@ -159,7 +159,7 @@ mod tests { use serde_json::json; use crate::{ - mongo_query_plan::{ObjectType, Type}, + mongo_query_plan::{ObjectField, ObjectType, Type}, procedure::arguments_to_mongodb_expressions::arguments_to_mongodb_expressions, }; @@ -170,7 +170,11 @@ mod tests { let native_mutation = NativeMutation { result_type: Type::Object(ObjectType { name: Some("InsertArtist".into()), - fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + fields: [( + "ok".into(), + ObjectField::new(Type::Scalar(MongoScalarType::Bson(S::Bool))), + )] + .into(), }), command: doc! { "insert": "Artist", @@ -224,11 +228,11 @@ mod tests { fields: [ ( "ArtistId".into(), - Type::Scalar(MongoScalarType::Bson(S::Int)), + ObjectField::new(Type::Scalar(MongoScalarType::Bson(S::Int))), ), ( "Name".into(), - Type::Scalar(MongoScalarType::Bson(S::String)), + ObjectField::new(Type::Scalar(MongoScalarType::Bson(S::String))), ), ] .into(), @@ -237,7 +241,11 @@ mod tests { let native_mutation = NativeMutation { result_type: Type::Object(ObjectType { name: Some("InsertArtist".into()), - fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + fields: [( + "ok".into(), + ObjectField::new(Type::Scalar(MongoScalarType::Bson(S::Bool))), + )] + .into(), }), command: doc! { "insert": "Artist", @@ -287,7 +295,11 @@ mod tests { let native_mutation = NativeMutation { result_type: Type::Object(ObjectType { name: Some("Insert".into()), - fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + fields: [( + "ok".into(), + ObjectField::new(Type::Scalar(MongoScalarType::Bson(S::Bool))), + )] + .into(), }), command: doc! { "insert": "{{prefix}}-{{basename}}", @@ -334,7 +346,11 @@ mod tests { let native_mutation = NativeMutation { result_type: Type::Object(ObjectType { name: Some("InsertArtist".into()), - fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + fields: [( + "ok".into(), + ObjectField::new(Type::Scalar(MongoScalarType::Bson(S::Bool))), + )] + .into(), }), command: doc! 
{ "insert": "Artist", diff --git a/crates/mongodb-agent-common/src/query/column_ref.rs b/crates/mongodb-agent-common/src/query/column_ref.rs index fc95f652..43f26ca4 100644 --- a/crates/mongodb-agent-common/src/query/column_ref.rs +++ b/crates/mongodb-agent-common/src/query/column_ref.rs @@ -5,7 +5,9 @@ use std::{borrow::Cow, iter::once}; use mongodb::bson::{doc, Bson}; +use ndc_models::FieldName; use ndc_query_plan::Scope; +use nonempty::NonEmpty; use crate::{ interface_types::MongoAgentError, @@ -13,6 +15,8 @@ use crate::{ mongodb::sanitize::is_name_safe, }; +use super::make_selector::AggregationExpression; + /// Reference to a document field, or a nested property of a document field. There are two contexts /// where we reference columns: /// @@ -44,8 +48,7 @@ pub enum ColumnRef<'a> { impl<'a> ColumnRef<'a> { /// Given a column target returns a string that can be used in a MongoDB match query that /// references the corresponding field, either in the target collection of a query request, or - /// in the related collection. Resolves nested fields and root collection references, but does - /// not traverse relationships. + /// in the related collection. /// /// If the given target cannot be represented as a match query key, falls back to providing an /// aggregation expression referencing the column. @@ -53,21 +56,26 @@ impl<'a> ColumnRef<'a> { from_comparison_target(column) } + pub fn from_column_and_field_path<'b>( + name: &'b FieldName, + field_path: Option<&'b Vec>, + ) -> ColumnRef<'b> { + from_column_and_field_path(name, field_path) + } + /// TODO: This will hopefully become infallible once ENG-1011 & ENG-1010 are implemented. pub fn from_order_by_target(target: &OrderByTarget) -> Result, MongoAgentError> { from_order_by_target(target) } - pub fn from_field_path<'b>( - field_path: impl IntoIterator, - ) -> ColumnRef<'b> { + pub fn from_field_path(field_path: NonEmpty<&ndc_models::FieldName>) -> ColumnRef<'_> { from_path( None, field_path .into_iter() .map(|field_name| field_name.as_ref() as &str), ) - .unwrap() + .expect("field_path is not empty") // safety: NonEmpty cannot be empty } pub fn from_field(field_name: &ndc_models::FieldName) -> ColumnRef<'_> { @@ -91,65 +99,54 @@ impl<'a> ColumnRef<'a> { fold_path_element(Some(self), field_name.as_ref()) } - pub fn into_aggregate_expression(self) -> Bson { - match self { + pub fn into_aggregate_expression(self) -> AggregationExpression { + let bson = match self { ColumnRef::MatchKey(key) => format!("${key}").into(), ColumnRef::ExpressionStringShorthand(key) => key.to_string().into(), ColumnRef::Expression(expr) => expr, + }; + AggregationExpression(bson) + } + + pub fn into_match_key(self) -> Option> { + match self { + ColumnRef::MatchKey(key) => Some(key), + _ => None, } } } fn from_comparison_target(column: &ComparisonTarget) -> ColumnRef<'_> { match column { - // We exclude `path` (the relationship path) from the resulting ColumnRef because MongoDB - // field references are not relationship-aware. Traversing relationship references is - // handled upstream. ComparisonTarget::Column { name, field_path, .. - } => { - let name_and_path = once(name.as_ref() as &str).chain( - field_path - .iter() - .flatten() - .map(|field_name| field_name.as_ref() as &str), - ); - // The None case won't come up if the input to [from_target_helper] has at least - // one element, and we know it does because we start the iterable with `name` - from_path(None, name_and_path).unwrap() - } - ComparisonTarget::ColumnInScope { - name, - field_path, - scope, - .. 
- } => { - // "$$ROOT" is not actually a valid match key, but cheating here makes the - // implementation much simpler. This match branch produces a ColumnRef::Expression - // in all cases. - let init = ColumnRef::variable(name_from_scope(scope)); - from_path( - Some(init), - once(name.as_ref() as &str).chain( - field_path - .iter() - .flatten() - .map(|field_name| field_name.as_ref() as &str), - ), - ) - // The None case won't come up if the input to [from_target_helper] has at least - // one element, and we know it does because we start the iterable with `name` - .unwrap() - } + } => from_column_and_field_path(name, field_path.as_ref()), } } +fn from_column_and_field_path<'a>( + name: &'a FieldName, + field_path: Option<&'a Vec>, +) -> ColumnRef<'a> { + let name_and_path = once(name.as_ref() as &str).chain( + field_path + .iter() + .copied() + .flatten() + .map(|field_name| field_name.as_ref() as &str), + ); + // The None case won't come up if the input to [from_target_helper] has at least + // one element, and we know it does because we start the iterable with `name` + from_path(None, name_and_path).unwrap() +} + fn from_order_by_target(target: &OrderByTarget) -> Result, MongoAgentError> { match target { OrderByTarget::Column { + path, name, field_path, - path, + .. } => { let name_and_path = path .iter() @@ -165,17 +162,9 @@ fn from_order_by_target(target: &OrderByTarget) -> Result, MongoAg // one element, and we know it does because we start the iterable with `name` Ok(from_path(None, name_and_path).unwrap()) } - OrderByTarget::SingleColumnAggregate { .. } => { + OrderByTarget::Aggregate { .. } => { // TODO: ENG-1011 - Err(MongoAgentError::NotImplemented( - "ordering by single column aggregate".into(), - )) - } - OrderByTarget::StarCountAggregate { .. } => { - // TODO: ENG-1010 - Err(MongoAgentError::NotImplemented( - "ordering by star count aggregate".into(), - )) + Err(MongoAgentError::NotImplemented("order by aggregate".into())) } } } @@ -232,7 +221,9 @@ fn fold_path_element<'a>( /// Unlike `column_ref` this expression cannot be used as a match query key - it can only be used /// as an expression. 
pub fn column_expression(column: &ComparisonTarget) -> Bson { - ColumnRef::from_comparison_target(column).into_aggregate_expression() + ColumnRef::from_comparison_target(column) + .into_aggregate_expression() + .into_bson() } #[cfg(test)] @@ -240,7 +231,6 @@ mod tests { use configuration::MongoScalarType; use mongodb::bson::doc; use mongodb_support::BsonScalarType; - use ndc_query_plan::Scope; use pretty_assertions::assert_eq; use crate::mongo_query_plan::{ComparisonTarget, Type}; @@ -251,9 +241,9 @@ mod tests { fn produces_match_query_key() -> anyhow::Result<()> { let target = ComparisonTarget::Column { name: "imdb".into(), + arguments: Default::default(), field_path: Some(vec!["rating".into()]), field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::Double)), - path: Default::default(), }; let actual = ColumnRef::from_comparison_target(&target); let expected = ColumnRef::MatchKey("imdb.rating".into()); @@ -265,9 +255,9 @@ mod tests { fn escapes_nested_field_name_with_dots() -> anyhow::Result<()> { let target = ComparisonTarget::Column { name: "subtitles".into(), + arguments: Default::default(), field_path: Some(vec!["english.us".into()]), field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: Default::default(), }; let actual = ColumnRef::from_comparison_target(&target); let expected = ColumnRef::Expression( @@ -287,9 +277,9 @@ mod tests { fn escapes_top_level_field_name_with_dots() -> anyhow::Result<()> { let target = ComparisonTarget::Column { name: "meta.subtitles".into(), + arguments: Default::default(), field_path: Some(vec!["english_us".into()]), field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: Default::default(), }; let actual = ColumnRef::from_comparison_target(&target); let expected = ColumnRef::Expression( @@ -309,9 +299,9 @@ mod tests { fn escapes_multiple_unsafe_nested_field_names() -> anyhow::Result<()> { let target = ComparisonTarget::Column { name: "meta".into(), + arguments: Default::default(), field_path: Some(vec!["$unsafe".into(), "$also_unsafe".into()]), field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: Default::default(), }; let actual = ColumnRef::from_comparison_target(&target); let expected = ColumnRef::Expression( @@ -336,9 +326,9 @@ mod tests { fn traverses_multiple_field_names_before_escaping() -> anyhow::Result<()> { let target = ComparisonTarget::Column { name: "valid_key".into(), + arguments: Default::default(), field_path: Some(vec!["also_valid".into(), "$not_valid".into()]), field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: Default::default(), }; let actual = ColumnRef::from_comparison_target(&target); let expected = ColumnRef::Expression( @@ -354,117 +344,121 @@ mod tests { Ok(()) } - #[test] - fn produces_dot_separated_root_column_reference() -> anyhow::Result<()> { - let target = ComparisonTarget::ColumnInScope { - name: "field".into(), - field_path: Some(vec!["prop1".into(), "prop2".into()]), - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - scope: Scope::Root, - }; - let actual = ColumnRef::from_comparison_target(&target); - let expected = - ColumnRef::ExpressionStringShorthand("$$scope_root.field.prop1.prop2".into()); - assert_eq!(actual, expected); - Ok(()) - } - - #[test] - fn escapes_unsafe_field_name_in_root_column_reference() -> anyhow::Result<()> { - let target = ComparisonTarget::ColumnInScope { - name: "$field".into(), - field_path: Default::default(), - field_type: 
Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - scope: Scope::Named("scope_0".into()), - }; - let actual = ColumnRef::from_comparison_target(&target); - let expected = ColumnRef::Expression( - doc! { - "$getField": { - "input": "$$scope_0", - "field": { "$literal": "$field" }, - } - } - .into(), - ); - assert_eq!(actual, expected); - Ok(()) - } - - #[test] - fn escapes_unsafe_nested_property_name_in_root_column_reference() -> anyhow::Result<()> { - let target = ComparisonTarget::ColumnInScope { - name: "field".into(), - field_path: Some(vec!["$unsafe_name".into()]), - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - scope: Scope::Root, - }; - let actual = ColumnRef::from_comparison_target(&target); - let expected = ColumnRef::Expression( - doc! { - "$getField": { - "input": "$$scope_root.field", - "field": { "$literal": "$unsafe_name" }, - } - } - .into(), - ); - assert_eq!(actual, expected); - Ok(()) - } - - #[test] - fn escapes_multiple_layers_of_nested_property_names_in_root_column_reference( - ) -> anyhow::Result<()> { - let target = ComparisonTarget::ColumnInScope { - name: "$field".into(), - field_path: Some(vec!["$unsafe_name1".into(), "$unsafe_name2".into()]), - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - scope: Scope::Root, - }; - let actual = ColumnRef::from_comparison_target(&target); - let expected = ColumnRef::Expression( - doc! { - "$getField": { - "input": { - "$getField": { - "input": { - "$getField": { - "input": "$$scope_root", - "field": { "$literal": "$field" }, - } - }, - "field": { "$literal": "$unsafe_name1" }, - } - }, - "field": { "$literal": "$unsafe_name2" }, - } - } - .into(), - ); - assert_eq!(actual, expected); - Ok(()) - } - - #[test] - fn escapes_unsafe_deeply_nested_property_name_in_root_column_reference() -> anyhow::Result<()> { - let target = ComparisonTarget::ColumnInScope { - name: "field".into(), - field_path: Some(vec!["prop1".into(), "$unsafe_name".into()]), - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - scope: Scope::Root, - }; - let actual = ColumnRef::from_comparison_target(&target); - let expected = ColumnRef::Expression( - doc! { - "$getField": { - "input": "$$scope_root.field.prop1", - "field": { "$literal": "$unsafe_name" }, - } - } - .into(), - ); - assert_eq!(actual, expected); - Ok(()) - } + // TODO: ENG-1487 `ComparisonTarget::ColumnInScope` is gone, but there is new, similar + // functionality in the form of named scopes. It will be useful to modify these tests when + // named scopes are supported in this connector. 
+ + // #[test] + // fn produces_dot_separated_root_column_reference() -> anyhow::Result<()> { + // let target = ComparisonTarget::ColumnInScope { + // name: "field".into(), + // field_path: Some(vec!["prop1".into(), "prop2".into()]), + // field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // scope: Scope::Root, + // }; + // let actual = ColumnRef::from_comparison_target(&target); + // let expected = + // ColumnRef::ExpressionStringShorthand("$$scope_root.field.prop1.prop2".into()); + // assert_eq!(actual, expected); + // Ok(()) + // } + + // #[test] + // fn escapes_unsafe_field_name_in_root_column_reference() -> anyhow::Result<()> { + // let target = ComparisonTarget::ColumnInScope { + // name: "$field".into(), + // field_path: Default::default(), + // field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // scope: Scope::Named("scope_0".into()), + // }; + // let actual = ColumnRef::from_comparison_target(&target); + // let expected = ColumnRef::Expression( + // doc! { + // "$getField": { + // "input": "$$scope_0", + // "field": { "$literal": "$field" }, + // } + // } + // .into(), + // ); + // assert_eq!(actual, expected); + // Ok(()) + // } + + // #[test] + // fn escapes_unsafe_nested_property_name_in_root_column_reference() -> anyhow::Result<()> { + // let target = ComparisonTarget::ColumnInScope { + // name: "field".into(), + // field_path: Some(vec!["$unsafe_name".into()]), + // field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // scope: Scope::Root, + // }; + // let actual = ColumnRef::from_comparison_target(&target); + // let expected = ColumnRef::Expression( + // doc! { + // "$getField": { + // "input": "$$scope_root.field", + // "field": { "$literal": "$unsafe_name" }, + // } + // } + // .into(), + // ); + // assert_eq!(actual, expected); + // Ok(()) + // } + + // #[test] + // fn escapes_multiple_layers_of_nested_property_names_in_root_column_reference( + // ) -> anyhow::Result<()> { + // let target = ComparisonTarget::ColumnInScope { + // name: "$field".into(), + // field_path: Some(vec!["$unsafe_name1".into(), "$unsafe_name2".into()]), + // field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // scope: Scope::Root, + // }; + // let actual = ColumnRef::from_comparison_target(&target); + // let expected = ColumnRef::Expression( + // doc! { + // "$getField": { + // "input": { + // "$getField": { + // "input": { + // "$getField": { + // "input": "$$scope_root", + // "field": { "$literal": "$field" }, + // } + // }, + // "field": { "$literal": "$unsafe_name1" }, + // } + // }, + // "field": { "$literal": "$unsafe_name2" }, + // } + // } + // .into(), + // ); + // assert_eq!(actual, expected); + // Ok(()) + // } + + // #[test] + // fn escapes_unsafe_deeply_nested_property_name_in_root_column_reference() -> anyhow::Result<()> { + // let target = ComparisonTarget::ColumnInScope { + // name: "field".into(), + // field_path: Some(vec!["prop1".into(), "$unsafe_name".into()]), + // field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // scope: Scope::Root, + // }; + // let actual = ColumnRef::from_comparison_target(&target); + // let expected = ColumnRef::Expression( + // doc! 
{ + // "$getField": { + // "input": "$$scope_root.field.prop1", + // "field": { "$literal": "$unsafe_name" }, + // } + // } + // .into(), + // ); + // assert_eq!(actual, expected); + // Ok(()) + // } } diff --git a/crates/mongodb-agent-common/src/query/make_selector/make_aggregation_expression.rs b/crates/mongodb-agent-common/src/query/make_selector/make_aggregation_expression.rs index 7ea14c76..4f17d6cd 100644 --- a/crates/mongodb-agent-common/src/query/make_selector/make_aggregation_expression.rs +++ b/crates/mongodb-agent-common/src/query/make_selector/make_aggregation_expression.rs @@ -1,5 +1,3 @@ -use std::iter::once; - use anyhow::anyhow; use itertools::Itertools as _; use mongodb::bson::{self, doc, Bson}; @@ -8,7 +6,9 @@ use ndc_models::UnaryComparisonOperator; use crate::{ comparison_function::ComparisonFunction, interface_types::MongoAgentError, - mongo_query_plan::{ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, Type}, + mongo_query_plan::{ + ArrayComparison, ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, Type, + }, query::{ column_ref::{column_expression, ColumnRef}, query_variable_name::query_variable_name, @@ -22,11 +22,21 @@ use super::Result; pub struct AggregationExpression(pub Bson); impl AggregationExpression { - fn into_bson(self) -> Bson { + pub fn new(expression: impl Into) -> Self { + Self(expression.into()) + } + + pub fn into_bson(self) -> Bson { self.0 } } +impl From for Bson { + fn from(value: AggregationExpression) -> Self { + value.into_bson() + } +} + pub fn make_aggregation_expression(expr: &Expression) -> Result { match expr { Expression::And { expressions } => { @@ -71,8 +81,11 @@ pub fn make_aggregation_expression(expr: &Expression) -> Result make_binary_comparison_selector(column, operator, value), + Expression::ArrayComparison { column, comparison } => { + make_array_comparison_selector(column, comparison) + } Expression::UnaryComparisonOperator { column, operator } => { - make_unary_comparison_selector(column, *operator) + Ok(make_unary_comparison_selector(column, *operator)) } } } @@ -118,7 +131,7 @@ pub fn make_aggregation_expression_for_exists( }, Some(predicate), ) => { - let column_ref = ColumnRef::from_field_path(field_path.iter().chain(once(column_name))); + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); exists_in_array(column_ref, predicate)? } ( @@ -129,7 +142,29 @@ pub fn make_aggregation_expression_for_exists( }, None, ) => { - let column_ref = ColumnRef::from_field_path(field_path.iter().chain(once(column_name))); + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); + exists_in_array_no_predicate(column_ref) + } + ( + ExistsInCollection::NestedScalarCollection { + column_name, + field_path, + .. + }, + Some(predicate), + ) => { + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); + exists_in_array(column_ref, predicate)? // TODO: ENG-1488 predicate expects objects with a __value field + } + ( + ExistsInCollection::NestedScalarCollection { + column_name, + field_path, + .. 
+ }, + None, + ) => { + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); exists_in_array_no_predicate(column_ref) } }; @@ -146,7 +181,7 @@ fn exists_in_array( "$anyElementTrue": { "$map": { "input": array_ref.into_aggregate_expression(), - "as": "CURRENT", // implicitly changes the document root in `exp` to be the array element + "as": "CURRENT", // implicitly changes the document root in `sub_expression` to be the array element "in": sub_expression, } } @@ -156,14 +191,9 @@ fn exists_in_array( } fn exists_in_array_no_predicate(array_ref: ColumnRef<'_>) -> AggregationExpression { - let index_zero = "0".into(); - let first_element_ref = array_ref.into_nested_field(&index_zero); - AggregationExpression( - doc! { - "$ne": [first_element_ref.into_aggregate_expression(), null] - } - .into(), - ) + AggregationExpression::new(doc! { + "$gt": [{ "$size": array_ref.into_aggregate_expression() }, 0] + }) } fn make_binary_comparison_selector( @@ -171,102 +201,78 @@ fn make_binary_comparison_selector( operator: &ComparisonFunction, value: &ComparisonValue, ) -> Result { - let aggregation_expression = match value { + let left_operand = ColumnRef::from_comparison_target(target_column).into_aggregate_expression(); + let right_operand = value_expression(value)?; + let expr = AggregationExpression( + operator + .mongodb_aggregation_expression(left_operand, right_operand) + .into(), + ); + Ok(expr) +} + +fn make_unary_comparison_selector( + target_column: &ndc_query_plan::ComparisonTarget, + operator: UnaryComparisonOperator, +) -> AggregationExpression { + match operator { + UnaryComparisonOperator::IsNull => AggregationExpression( + doc! { + "$eq": [column_expression(target_column), null] + } + .into(), + ), + } +} + +fn make_array_comparison_selector( + column: &ComparisonTarget, + comparison: &ArrayComparison, +) -> Result { + let doc = match comparison { + ArrayComparison::Contains { value } => doc! { + "$in": [value_expression(value)?, column_expression(column)] + }, + ArrayComparison::IsEmpty => doc! { + "$eq": [{ "$size": column_expression(column) }, 0] + }, + }; + Ok(AggregationExpression(doc.into())) +} + +fn value_expression(value: &ComparisonValue) -> Result { + match value { ComparisonValue::Column { - column: value_column, + path, + name, + field_path, + scope: _, // We'll need to reference scope for ENG-1153 + .. } => { // TODO: ENG-1153 Do we want an implicit exists in the value relationship? If both // target and value reference relationships do we want an exists in a Cartesian product // of the two? 
- if !value_column.relationship_path().is_empty() { + if !path.is_empty() { return Err(MongoAgentError::NotImplemented("binary comparisons where the right-side of the comparison references a relationship".into())); } - let left_operand = ColumnRef::from_comparison_target(target_column); - let right_operand = ColumnRef::from_comparison_target(value_column); - AggregationExpression( - operator - .mongodb_aggregation_expression( - left_operand.into_aggregate_expression(), - right_operand.into_aggregate_expression(), - ) - .into(), - ) + let value_ref = ColumnRef::from_column_and_field_path(name, field_path.as_ref()); + Ok(value_ref.into_aggregate_expression()) } ComparisonValue::Scalar { value, value_type } => { let comparison_value = bson_from_scalar_value(value, value_type)?; - - // Special case for array-to-scalar comparisons - this is required because implicit - // existential quantification over arrays for scalar comparisons does not work in - // aggregation expressions. - let expression_doc = if target_column.get_field_type().is_array() - && !value_type.is_array() - { - doc! { - "$reduce": { - "input": column_expression(target_column), - "initialValue": false, - "in": operator.mongodb_aggregation_expression("$$this", comparison_value) - }, - } - } else { - operator.mongodb_aggregation_expression( - column_expression(target_column), - comparison_value, - ) - }; - AggregationExpression(expression_doc.into()) + Ok(AggregationExpression::new(doc! { + "$literal": comparison_value + })) } ComparisonValue::Variable { name, variable_type, } => { let comparison_value = variable_to_mongo_expression(name, variable_type); - let expression_doc = - // Special case for array-to-scalar comparisons - this is required because implicit - // existential quantification over arrays for scalar comparisons does not work in - // aggregation expressions. - if target_column.get_field_type().is_array() && !variable_type.is_array() { - doc! { - "$reduce": { - "input": column_expression(target_column), - "initialValue": false, - "in": operator.mongodb_aggregation_expression("$$this", comparison_value.into_aggregate_expression()) - }, - } - } else { - operator.mongodb_aggregation_expression( - column_expression(target_column), - comparison_value.into_aggregate_expression() - ) - }; - AggregationExpression(expression_doc.into()) + Ok(comparison_value.into_aggregate_expression()) } - }; - - let implicit_exists_over_relationship = - traverse_relationship_path(target_column.relationship_path(), aggregation_expression); - - Ok(implicit_exists_over_relationship) -} - -fn make_unary_comparison_selector( - target_column: &ndc_query_plan::ComparisonTarget, - operator: UnaryComparisonOperator, -) -> std::result::Result { - let aggregation_expression = match operator { - UnaryComparisonOperator::IsNull => AggregationExpression( - doc! { - "$eq": [column_expression(target_column), null] - } - .into(), - ), - }; - - let implicit_exists_over_relationship = - traverse_relationship_path(target_column.relationship_path(), aggregation_expression); - - Ok(implicit_exists_over_relationship) + } } /// Convert a JSON Value into BSON using the provided type information. 
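For orientation, a minimal sketch of the aggregation-expression shapes produced by make_array_comparison_selector above, assuming a column that renders as the simple field reference "$genres" and a scalar value "Sci-Fi" (both placeholders, not taken from the codebase); only the bson crate is needed to express them:

use mongodb::bson::{doc, Document};

// ArrayComparison::Contains { value }: the scalar is wrapped in $literal by value_expression
// and checked for membership in the array field with $in.
fn contains_expression() -> Document {
    doc! { "$in": [{ "$literal": "Sci-Fi" }, "$genres"] }
}

// ArrayComparison::IsEmpty: compare the array's $size against zero.
fn is_empty_expression() -> Document {
    doc! { "$eq": [{ "$size": "$genres" }, 0] }
}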
@@ -275,26 +281,6 @@ fn bson_from_scalar_value(value: &serde_json::Value, value_type: &Type) -> Resul json_to_bson(value_type, value.clone()).map_err(|e| MongoAgentError::BadQuery(anyhow!(e))) } -fn traverse_relationship_path( - relationship_path: &[ndc_models::RelationshipName], - AggregationExpression(mut expression): AggregationExpression, -) -> AggregationExpression { - for path_element in relationship_path.iter().rev() { - let path_element_ref = ColumnRef::from_relationship(path_element); - expression = doc! { - "$anyElementTrue": { - "$map": { - "input": path_element_ref.into_aggregate_expression(), - "as": "CURRENT", // implicitly changes the document root in `exp` to be the array element - "in": expression, - } - } - } - .into() - } - AggregationExpression(expression) -} - fn variable_to_mongo_expression( variable: &ndc_models::VariableName, value_type: &Type, diff --git a/crates/mongodb-agent-common/src/query/make_selector/make_query_document.rs b/crates/mongodb-agent-common/src/query/make_selector/make_query_document.rs index 916c586f..df766662 100644 --- a/crates/mongodb-agent-common/src/query/make_selector/make_query_document.rs +++ b/crates/mongodb-agent-common/src/query/make_selector/make_query_document.rs @@ -1,14 +1,14 @@ -use std::iter::once; - use anyhow::anyhow; use itertools::Itertools as _; -use mongodb::bson::{self, doc}; +use mongodb::bson::{self, doc, Bson}; use ndc_models::UnaryComparisonOperator; use crate::{ comparison_function::ComparisonFunction, interface_types::MongoAgentError, - mongo_query_plan::{ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, Type}, + mongo_query_plan::{ + ArrayComparison, ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, Type, + }, query::{column_ref::ColumnRef, serialization::json_to_bson}, }; @@ -73,6 +73,9 @@ pub fn make_query_document(expr: &Expression) -> Result> { Expression::UnaryComparisonOperator { column, operator } => { make_unary_comparison_selector(column, operator) } + Expression::ArrayComparison { column, comparison } => { + make_array_comparison_selector(column, comparison) + } } } @@ -102,7 +105,7 @@ fn make_query_document_for_exists( }, Some(predicate), ) => { - let column_ref = ColumnRef::from_field_path(field_path.iter().chain(once(column_name))); + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); exists_in_array(column_ref, predicate)? } ( @@ -113,7 +116,29 @@ fn make_query_document_for_exists( }, None, ) => { - let column_ref = ColumnRef::from_field_path(field_path.iter().chain(once(column_name))); + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); + exists_in_array_no_predicate(column_ref) + } + ( + ExistsInCollection::NestedScalarCollection { + column_name, + field_path, + .. + }, + Some(predicate), + ) => { + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); + exists_in_array(column_ref, predicate)? // TODO: predicate expects objects with a __value field + } + ( + ExistsInCollection::NestedScalarCollection { + column_name, + field_path, + .. 
+ }, + None, + ) => { + let column_ref = ColumnRef::from_column_and_field_path(column_name, Some(field_path)); exists_in_array_no_predicate(column_ref) } }; @@ -151,25 +176,16 @@ fn make_binary_comparison_selector( operator: &ComparisonFunction, value: &ComparisonValue, ) -> Result> { - let query_doc = match value { - ComparisonValue::Scalar { value, value_type } => { - let comparison_value = bson_from_scalar_value(value, value_type)?; + let selector = + value_expression(value)?.and_then(|value| { match ColumnRef::from_comparison_target(target_column) { - ColumnRef::MatchKey(key) => Some(QueryDocument( - operator.mongodb_match_query(key, comparison_value), - )), + ColumnRef::MatchKey(key) => { + Some(QueryDocument(operator.mongodb_match_query(key, value))) + } _ => None, } - } - ComparisonValue::Column { .. } => None, - // Variables cannot be referenced in match documents - ComparisonValue::Variable { .. } => None, - }; - - let implicit_exists_over_relationship = - query_doc.and_then(|d| traverse_relationship_path(target_column.relationship_path(), d)); - - Ok(implicit_exists_over_relationship) + }); + Ok(selector) } fn make_unary_comparison_selector( @@ -184,35 +200,43 @@ fn make_unary_comparison_selector( _ => None, }, }; + Ok(query_doc) +} - let implicit_exists_over_relationship = - query_doc.and_then(|d| traverse_relationship_path(target_column.relationship_path(), d)); - - Ok(implicit_exists_over_relationship) +fn make_array_comparison_selector( + column: &ComparisonTarget, + comparison: &ArrayComparison, +) -> Result> { + let column_ref = ColumnRef::from_comparison_target(column); + let ColumnRef::MatchKey(key) = column_ref else { + return Ok(None); + }; + let doc = match comparison { + ArrayComparison::Contains { value } => value_expression(value)?.map(|value| { + doc! { + key: { "$elemMatch": { "$eq": value } } + } + }), + ArrayComparison::IsEmpty => Some(doc! { + key: { "$size": 0 } + }), + }; + Ok(doc.map(QueryDocument)) } -/// For simple cases the target of an expression is a field reference. But if the target is -/// a column of a related collection then we're implicitly making an array comparison (because -/// related documents always come as an array, even for object relationships), so we have to wrap -/// the starting expression with an `$elemMatch` for each relationship that is traversed to reach -/// the target column. -fn traverse_relationship_path( - path: &[ndc_models::RelationshipName], - QueryDocument(expression): QueryDocument, -) -> Option { - let mut expression = Some(expression); - for path_element in path.iter().rev() { - let path_element_ref = ColumnRef::from_relationship(path_element); - expression = expression.and_then(|expr| match path_element_ref { - ColumnRef::MatchKey(key) => Some(doc! { - key: { - "$elemMatch": expr - } - }), - _ => None, - }); - } - expression.map(QueryDocument) +/// Only scalar comparison values can be represented in query documents. This function returns such +/// a representation if there is a legal way to do so. +fn value_expression(value: &ComparisonValue) -> Result> { + let expression = match value { + ComparisonValue::Scalar { value, value_type } => { + let bson_value = bson_from_scalar_value(value, value_type)?; + Some(bson_value) + } + ComparisonValue::Column { .. } => None, + // Variables cannot be referenced in match documents + ComparisonValue::Variable { .. } => None, + }; + Ok(expression) } /// Convert a JSON Value into BSON using the provided type information. 
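The query-document counterparts from make_query_document.rs have these shapes, again sketched with an assumed match key "genres" and scalar value "Sci-Fi"; they only apply when the column resolves to a plain ColumnRef::MatchKey:

use mongodb::bson::{doc, Document};

// ArrayComparison::Contains { value }: an $elemMatch equality test on the array field.
fn contains_selector() -> Document {
    doc! { "genres": { "$elemMatch": { "$eq": "Sci-Fi" } } }
}

// ArrayComparison::IsEmpty: the array has exactly zero elements.
fn is_empty_selector() -> Document {
    doc! { "genres": { "$size": 0 } }
}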
diff --git a/crates/mongodb-agent-common/src/query/make_selector/mod.rs b/crates/mongodb-agent-common/src/query/make_selector/mod.rs index 2f28b1d0..4dcf9d00 100644 --- a/crates/mongodb-agent-common/src/query/make_selector/mod.rs +++ b/crates/mongodb-agent-common/src/query/make_selector/mod.rs @@ -32,14 +32,9 @@ pub fn make_selector(expr: &Expression) -> Result { #[cfg(test)] mod tests { use configuration::MongoScalarType; - use mongodb::bson::{self, bson, doc}; + use mongodb::bson::doc; use mongodb_support::BsonScalarType; use ndc_models::UnaryComparisonOperator; - use ndc_query_plan::{plan_for_query_request, Scope}; - use ndc_test_helpers::{ - binop, column_value, path_element, query, query_request, relation_field, root, target, - value, - }; use pretty_assertions::assert_eq; use crate::{ @@ -47,8 +42,6 @@ mod tests { mongo_query_plan::{ ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, Type, }, - query::pipeline_for_query_request, - test_helpers::{chinook_config, chinook_relationships}, }; use super::make_selector; @@ -56,18 +49,26 @@ mod tests { #[test] fn compares_fields_of_related_documents_using_elem_match_in_binary_comparison( ) -> anyhow::Result<()> { - let selector = make_selector(&Expression::BinaryComparisonOperator { - column: ComparisonTarget::Column { - name: "Name".into(), - field_path: None, - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: vec!["Albums".into(), "Tracks".into()], - }, - operator: ComparisonFunction::Equal, - value: ComparisonValue::Scalar { - value: "Helter Skelter".into(), - value_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + let selector = make_selector(&Expression::Exists { + in_collection: ExistsInCollection::Related { + relationship: "Albums".into(), }, + predicate: Some(Box::new(Expression::Exists { + in_collection: ExistsInCollection::Related { + relationship: "Tracks".into(), + }, + predicate: Some(Box::new(Expression::BinaryComparisonOperator { + column: ComparisonTarget::column( + "Name", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), + operator: ComparisonFunction::Equal, + value: ComparisonValue::Scalar { + value: "Helter Skelter".into(), + value_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + }, + })), + })), })?; let expected = doc! { @@ -89,14 +90,22 @@ mod tests { #[test] fn compares_fields_of_related_documents_using_elem_match_in_unary_comparison( ) -> anyhow::Result<()> { - let selector = make_selector(&Expression::UnaryComparisonOperator { - column: ComparisonTarget::Column { - name: "Name".into(), - field_path: None, - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: vec!["Albums".into(), "Tracks".into()], + let selector = make_selector(&Expression::Exists { + in_collection: ExistsInCollection::Related { + relationship: "Albums".into(), }, - operator: UnaryComparisonOperator::IsNull, + predicate: Some(Box::new(Expression::Exists { + in_collection: ExistsInCollection::Related { + relationship: "Tracks".into(), + }, + predicate: Some(Box::new(Expression::UnaryComparisonOperator { + column: ComparisonTarget::column( + "Name", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), + operator: UnaryComparisonOperator::IsNull, + })), + })), })?; let expected = doc! 
{ @@ -118,21 +127,15 @@ mod tests { #[test] fn compares_two_columns() -> anyhow::Result<()> { let selector = make_selector(&Expression::BinaryComparisonOperator { - column: ComparisonTarget::Column { - name: "Name".into(), - field_path: None, - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: Default::default(), - }, + column: ComparisonTarget::column( + "Name", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), operator: ComparisonFunction::Equal, - value: ComparisonValue::Column { - column: ComparisonTarget::Column { - name: "Title".into(), - field_path: None, - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - path: Default::default(), - }, - }, + value: ComparisonValue::column( + "Title", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), })?; let expected = doc! { @@ -145,119 +148,120 @@ mod tests { Ok(()) } - #[test] - fn compares_root_collection_column_to_scalar() -> anyhow::Result<()> { - let selector = make_selector(&Expression::BinaryComparisonOperator { - column: ComparisonTarget::ColumnInScope { - name: "Name".into(), - field_path: None, - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - scope: Scope::Named("scope_0".to_string()), - }, - operator: ComparisonFunction::Equal, - value: ComparisonValue::Scalar { - value: "Lady Gaga".into(), - value_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - }, - })?; + // TODO: ENG-1487 modify this test for the new named scopes feature + // #[test] + // fn compares_root_collection_column_to_scalar() -> anyhow::Result<()> { + // let selector = make_selector(&Expression::BinaryComparisonOperator { + // column: ComparisonTarget::ColumnInScope { + // name: "Name".into(), + // field_path: None, + // field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // scope: Scope::Named("scope_0".to_string()), + // }, + // operator: ComparisonFunction::Equal, + // value: ComparisonValue::Scalar { + // value: "Lady Gaga".into(), + // value_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + // }, + // })?; + // + // let expected = doc! { + // "$expr": { + // "$eq": ["$$scope_0.Name", "Lady Gaga"] + // } + // }; + // + // assert_eq!(selector, expected); + // Ok(()) + // } - let expected = doc! 
{ - "$expr": { - "$eq": ["$$scope_0.Name", "Lady Gaga"] - } - }; - - assert_eq!(selector, expected); - Ok(()) - } - - #[test] - fn root_column_reference_refereces_column_of_nearest_query() -> anyhow::Result<()> { - let request = query_request() - .collection("Artist") - .query( - query().fields([relation_field!("Albums" => "Albums", query().predicate( - binop( - "_gt", - target!("Milliseconds", relations: [ - path_element("Tracks".into()).predicate( - binop("_eq", target!("Name"), column_value!(root("Title"))) - ), - ]), - value!(30_000), - ) - ))]), - ) - .relationships(chinook_relationships()) - .into(); - - let config = chinook_config(); - let plan = plan_for_query_request(&config, request)?; - let pipeline = pipeline_for_query_request(&config, &plan)?; - - let expected_pipeline = bson!([ - { - "$lookup": { - "from": "Album", - "localField": "ArtistId", - "foreignField": "ArtistId", - "as": "Albums", - "let": { - "scope_root": "$$ROOT", - }, - "pipeline": [ - { - "$lookup": { - "from": "Track", - "localField": "AlbumId", - "foreignField": "AlbumId", - "as": "Tracks", - "let": { - "scope_0": "$$ROOT", - }, - "pipeline": [ - { - "$match": { - "$expr": { "$eq": ["$Name", "$$scope_0.Title"] }, - }, - }, - { - "$replaceWith": { - "Milliseconds": { "$ifNull": ["$Milliseconds", null] } - } - }, - ] - } - }, - { - "$match": { - "Tracks": { - "$elemMatch": { - "Milliseconds": { "$gt": 30_000 } - } - } - } - }, - { - "$replaceWith": { - "Tracks": { "$getField": { "$literal": "Tracks" } } - } - }, - ], - }, - }, - { - "$replaceWith": { - "Albums": { - "rows": [] - } - } - }, - ]); - - assert_eq!(bson::to_bson(&pipeline).unwrap(), expected_pipeline); - Ok(()) - } + // #[test] + // fn root_column_reference_refereces_column_of_nearest_query() -> anyhow::Result<()> { + // let request = query_request() + // .collection("Artist") + // .query( + // query().fields([relation_field!("Albums" => "Albums", query().predicate( + // binop( + // "_gt", + // target!("Milliseconds", relations: [ + // path_element("Tracks".into()).predicate( + // binop("_eq", target!("Name"), column_value!(root("Title"))) + // ), + // ]), + // value!(30_000), + // ) + // ))]), + // ) + // .relationships(chinook_relationships()) + // .into(); + // + // let config = chinook_config(); + // let plan = plan_for_query_request(&config, request)?; + // let pipeline = pipeline_for_query_request(&config, &plan)?; + // + // let expected_pipeline = bson!([ + // { + // "$lookup": { + // "from": "Album", + // "localField": "ArtistId", + // "foreignField": "ArtistId", + // "as": "Albums", + // "let": { + // "scope_root": "$$ROOT", + // }, + // "pipeline": [ + // { + // "$lookup": { + // "from": "Track", + // "localField": "AlbumId", + // "foreignField": "AlbumId", + // "as": "Tracks", + // "let": { + // "scope_0": "$$ROOT", + // }, + // "pipeline": [ + // { + // "$match": { + // "$expr": { "$eq": ["$Name", "$$scope_0.Title"] }, + // }, + // }, + // { + // "$replaceWith": { + // "Milliseconds": { "$ifNull": ["$Milliseconds", null] } + // } + // }, + // ] + // } + // }, + // { + // "$match": { + // "Tracks": { + // "$elemMatch": { + // "Milliseconds": { "$gt": 30_000 } + // } + // } + // } + // }, + // { + // "$replaceWith": { + // "Tracks": { "$getField": { "$literal": "Tracks" } } + // } + // }, + // ], + // }, + // }, + // { + // "$replaceWith": { + // "Albums": { + // "rows": [] + // } + // } + // }, + // ]); + // + // assert_eq!(bson::to_bson(&pipeline).unwrap(), expected_pipeline); + // Ok(()) + // } #[test] fn 
compares_value_to_elements_of_array_field() -> anyhow::Result<()> { @@ -268,12 +272,10 @@ mod tests { field_path: Default::default(), }, predicate: Some(Box::new(Expression::BinaryComparisonOperator { - column: ComparisonTarget::Column { - name: "last_name".into(), - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - field_path: Default::default(), - path: Default::default(), - }, + column: ComparisonTarget::column( + "last_name", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), operator: ComparisonFunction::Equal, value: ComparisonValue::Scalar { value: "Hughes".into(), @@ -303,12 +305,10 @@ mod tests { field_path: vec!["site_info".into()], }, predicate: Some(Box::new(Expression::BinaryComparisonOperator { - column: ComparisonTarget::Column { - name: "last_name".into(), - field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), - field_path: Default::default(), - path: Default::default(), - }, + column: ComparisonTarget::column( + "last_name", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), operator: ComparisonFunction::Equal, value: ComparisonValue::Scalar { value: "Hughes".into(), @@ -318,7 +318,7 @@ mod tests { })?; let expected = doc! { - "site_info.staff": { + "staff.site_info": { "$elemMatch": { "last_name": { "$eq": "Hughes" } } diff --git a/crates/mongodb-agent-common/src/query/make_sort.rs b/crates/mongodb-agent-common/src/query/make_sort.rs index 7adad5a8..5046ea6b 100644 --- a/crates/mongodb-agent-common/src/query/make_sort.rs +++ b/crates/mongodb-agent-common/src/query/make_sort.rs @@ -27,7 +27,7 @@ pub fn make_sort_stages(order_by: &OrderBy) -> Result> { if !required_aliases.is_empty() { let fields = required_aliases .into_iter() - .map(|(alias, expression)| (alias, expression.into_aggregate_expression())) + .map(|(alias, expression)| (alias, expression.into_aggregate_expression().into_bson())) .collect(); let stage = Stage::AddFields(fields); stages.push(stage); @@ -80,6 +80,7 @@ fn safe_alias(target: &OrderByTarget) -> Result { name, field_path, path, + .. } => { let name_and_path = once("__sort_key_") .chain(path.iter().map(|n| n.as_str())) @@ -95,17 +96,9 @@ fn safe_alias(target: &OrderByTarget) -> Result { &combine_all_elements_into_one_name, )) } - ndc_query_plan::OrderByTarget::SingleColumnAggregate { .. } => { - // TODO: ENG-1011 - Err(MongoAgentError::NotImplemented( - "ordering by single column aggregate".into(), - )) - } - ndc_query_plan::OrderByTarget::StarCountAggregate { .. } => { - // TODO: ENG-1010 - Err(MongoAgentError::NotImplemented( - "ordering by star count aggregate".into(), - )) + ndc_query_plan::OrderByTarget::Aggregate { .. } => { + // TODO: ENG-1010, ENG-1011 + Err(MongoAgentError::NotImplemented("order by aggregate".into())) } } } @@ -116,6 +109,7 @@ mod tests { use mongodb_support::aggregate::SortDocument; use ndc_models::{FieldName, OrderDirection}; use ndc_query_plan::OrderByElement; + use nonempty::{nonempty, NonEmpty}; use pretty_assertions::assert_eq; use crate::{mongo_query_plan::OrderBy, query::column_ref::ColumnRef}; @@ -131,10 +125,11 @@ mod tests { name: "$schema".into(), field_path: Default::default(), path: Default::default(), + arguments: Default::default(), }, }], }; - let path: [FieldName; 1] = ["$schema".into()]; + let path: NonEmpty = NonEmpty::singleton("$schema".into()); let actual = make_sort(&order_by)?; let expected_sort_doc = SortDocument(doc! 
{ @@ -142,7 +137,7 @@ mod tests { }); let expected_aliases = [( "__sort_key__·24schema".into(), - ColumnRef::from_field_path(path.iter()), + ColumnRef::from_field_path(path.as_ref()), )] .into(); assert_eq!(actual, (expected_sort_doc, expected_aliases)); @@ -158,10 +153,11 @@ mod tests { name: "configuration".into(), field_path: Some(vec!["$schema".into()]), path: Default::default(), + arguments: Default::default(), }, }], }; - let path: [FieldName; 2] = ["configuration".into(), "$schema".into()]; + let path: NonEmpty = nonempty!["configuration".into(), "$schema".into()]; let actual = make_sort(&order_by)?; let expected_sort_doc = SortDocument(doc! { @@ -169,7 +165,7 @@ mod tests { }); let expected_aliases = [( "__sort_key__configuration_·24schema".into(), - ColumnRef::from_field_path(path.iter()), + ColumnRef::from_field_path(path.as_ref()), )] .into(); assert_eq!(actual, (expected_sort_doc, expected_aliases)); diff --git a/crates/mongodb-agent-common/src/query/mod.rs b/crates/mongodb-agent-common/src/query/mod.rs index d6094ca6..8d5b5372 100644 --- a/crates/mongodb-agent-common/src/query/mod.rs +++ b/crates/mongodb-agent-common/src/query/mod.rs @@ -287,6 +287,7 @@ mod tests { let expected_response = QueryResponse(vec![RowSet { aggregates: None, rows: Some(vec![]), + groups: Default::default(), }]); let db = mock_collection_aggregate_response("comments", bson!([])); diff --git a/crates/mongodb-agent-common/src/query/pipeline.rs b/crates/mongodb-agent-common/src/query/pipeline.rs index f89d2c8f..6174de15 100644 --- a/crates/mongodb-agent-common/src/query/pipeline.rs +++ b/crates/mongodb-agent-common/src/query/pipeline.rs @@ -252,9 +252,9 @@ fn pipeline_for_aggregate( fn mk_target_field(name: FieldName, field_path: Option>) -> ComparisonTarget { ComparisonTarget::Column { name, + arguments: Default::default(), field_path, field_type: Type::Scalar(MongoScalarType::ExtendedJSON), // type does not matter here - path: Default::default(), } } @@ -278,6 +278,7 @@ fn pipeline_for_aggregate( column, field_path, distinct, + .. } if distinct => { let target_field = mk_target_field(column, field_path); Pipeline::from_iter( @@ -286,7 +287,8 @@ fn pipeline_for_aggregate( limit.map(Into::into).map(Stage::Limit), Some(Stage::Group { key_expression: ColumnRef::from_comparison_target(&target_field) - .into_aggregate_expression(), + .into_aggregate_expression() + .into_bson(), accumulators: [].into(), }), Some(Stage::Count(RESULT_FIELD.to_string())), @@ -296,10 +298,12 @@ fn pipeline_for_aggregate( ) } + // TODO: ENG-1465 count by distinct Aggregate::ColumnCount { column, field_path, distinct: _, + .. } => Pipeline::from_iter( [ Some(filter_to_documents_with_value(mk_target_field( @@ -316,18 +320,19 @@ fn pipeline_for_aggregate( column, field_path, function, - result_type: _, + .. 
} => { use AggregationFunction::*; let target_field = ComparisonTarget::Column { name: column.clone(), - field_path, + arguments: Default::default(), + field_path: field_path.clone(), field_type: Type::Scalar(MongoScalarType::Bson(BsonScalarType::Null)), // type does not matter here - path: Default::default(), }; - let field_ref = - ColumnRef::from_comparison_target(&target_field).into_aggregate_expression(); + let field_ref = ColumnRef::from_column_and_field_path(&column, field_path.as_ref()) + .into_aggregate_expression() + .into_bson(); let accumulator = match function { Avg => Accumulator::Avg(field_ref), diff --git a/crates/mongodb-agent-common/src/query/query_variable_name.rs b/crates/mongodb-agent-common/src/query/query_variable_name.rs index bacaccbe..ee910b34 100644 --- a/crates/mongodb-agent-common/src/query/query_variable_name.rs +++ b/crates/mongodb-agent-common/src/query/query_variable_name.rs @@ -34,7 +34,7 @@ fn type_name(input_type: &Type) -> Cow<'static, str> { fn object_type_name(obj: &ObjectType) -> String { let mut output = "{".to_string(); for (key, t) in &obj.fields { - output.push_str(&format!("{key}:{}", type_name(t))); + output.push_str(&format!("{key}:{}", type_name(&t.r#type))); } output.push('}'); output diff --git a/crates/mongodb-agent-common/src/query/relations.rs b/crates/mongodb-agent-common/src/query/relations.rs index 44efcc6f..fb24809f 100644 --- a/crates/mongodb-agent-common/src/query/relations.rs +++ b/crates/mongodb-agent-common/src/query/relations.rs @@ -4,6 +4,7 @@ use itertools::Itertools as _; use mongodb::bson::{doc, Document}; use mongodb_support::aggregate::{Pipeline, Stage}; use ndc_query_plan::Scope; +use nonempty::NonEmpty; use crate::mongo_query_plan::{MongoConfiguration, Query, QueryPlan}; use crate::query::column_ref::name_from_scope; @@ -59,7 +60,7 @@ pub fn pipeline_for_relations( fn make_lookup_stage( from: ndc_models::CollectionName, - column_mapping: &BTreeMap, + column_mapping: &BTreeMap>, r#as: ndc_models::RelationshipName, lookup_pipeline: Pipeline, scope: Option<&Scope>, @@ -67,41 +68,29 @@ fn make_lookup_stage( // If there is a single column mapping, and the source and target field references can be // expressed as match keys (we don't need to escape field names), then we can use a concise // correlated subquery. Otherwise we need to fall back to an uncorrelated subquery. - let safe_single_column_mapping = if column_mapping.len() == 1 { - // Safe to unwrap because we just checked the hashmap size - let (source_selector, target_selector) = column_mapping.iter().next().unwrap(); - - let source_ref = ColumnRef::from_field(source_selector); - let target_ref = ColumnRef::from_field(target_selector); - - match (source_ref, target_ref) { - (ColumnRef::MatchKey(source_key), ColumnRef::MatchKey(target_key)) => { - Some((source_key.to_string(), target_key.to_string())) - } - - // If the source and target refs cannot be expressed in required syntax then we need to - // fall back to a lookup pipeline that con compare arbitrary expressions. - // [multiple_column_mapping_lookup] does this. 
- _ => None, - } + let single_mapping = if column_mapping.len() == 1 { + column_mapping.iter().next() } else { None }; - - match safe_single_column_mapping { - Some((source_selector_key, target_selector_key)) => { - lookup_with_concise_correlated_subquery( - from, - source_selector_key, - target_selector_key, - r#as, - lookup_pipeline, - scope, - ) - } - None => { - lookup_with_uncorrelated_subquery(from, column_mapping, r#as, lookup_pipeline, scope) - } + let source_selector = single_mapping.map(|(field_name, _)| field_name); + let target_selector = single_mapping.map(|(_, target_path)| target_path); + + let source_key = source_selector.and_then(|f| ColumnRef::from_field(f).into_match_key()); + let target_key = + target_selector.and_then(|path| ColumnRef::from_field_path(path.as_ref()).into_match_key()); + + match (source_key, target_key) { + (Some(source_key), Some(target_key)) => lookup_with_concise_correlated_subquery( + from, + source_key.into_owned(), + target_key.into_owned(), + r#as, + lookup_pipeline, + scope, + ), + + _ => lookup_with_uncorrelated_subquery(from, column_mapping, r#as, lookup_pipeline, scope), } } @@ -138,7 +127,7 @@ fn lookup_with_concise_correlated_subquery( /// cases like joining on field names that require escaping. fn lookup_with_uncorrelated_subquery( from: ndc_models::CollectionName, - column_mapping: &BTreeMap, + column_mapping: &BTreeMap>, r#as: ndc_models::RelationshipName, lookup_pipeline: Pipeline, scope: Option<&Scope>, @@ -148,7 +137,9 @@ fn lookup_with_uncorrelated_subquery( .map(|local_field| { ( variable(local_field.as_str()), - ColumnRef::from_field(local_field).into_aggregate_expression(), + ColumnRef::from_field(local_field) + .into_aggregate_expression() + .into_bson(), ) }) .collect(); @@ -160,16 +151,16 @@ fn lookup_with_uncorrelated_subquery( // Creating an intermediate Vec and sorting it is done just to help with testing. // A stable order for matchers makes it easier to assert equality between actual // and expected pipelines. - let mut column_pairs: Vec<(&ndc_models::FieldName, &ndc_models::FieldName)> = + let mut column_pairs: Vec<(&ndc_models::FieldName, &NonEmpty)> = column_mapping.iter().collect(); column_pairs.sort(); let matchers: Vec = column_pairs .into_iter() - .map(|(local_field, remote_field)| { + .map(|(local_field, remote_field_path)| { doc! 
{ "$eq": [ ColumnRef::variable(variable(local_field.as_str())).into_aggregate_expression(), - ColumnRef::from_field(remote_field).into_aggregate_expression(), + ColumnRef::from_field_path(remote_field_path.as_ref()).into_aggregate_expression(), ] } }) .collect(); @@ -223,7 +214,7 @@ mod tests { ])) .relationships([( "class_students", - relationship("students", [("_id", "classId")]), + relationship("students", [("_id", &["classId"])]), )]) .into(); @@ -306,7 +297,7 @@ mod tests { ])) .relationships([( "student_class", - relationship("classes", [("classId", "_id")]), + relationship("classes", [("classId", &["_id"])]), )]) .into(); @@ -398,7 +389,10 @@ mod tests { ])) .relationships([( "students", - relationship("students", [("title", "class_title"), ("year", "year")]), + relationship( + "students", + [("title", &["class_title"]), ("year", &["year"])], + ), )]) .into(); @@ -489,7 +483,7 @@ mod tests { ])) .relationships([( "join", - relationship("weird_field_names", [("$invalid.name", "$invalid.name")]), + relationship("weird_field_names", [("$invalid.name", &["$invalid.name"])]), )]) .into(); @@ -562,10 +556,13 @@ mod tests { ])), ])) .relationships([ - ("students", relationship("students", [("_id", "class_id")])), + ( + "students", + relationship("students", [("_id", &["class_id"])]), + ), ( "assignments", - relationship("assignments", [("_id", "student_id")]), + relationship("assignments", [("_id", &["student_id"])]), ), ]) .into(); @@ -694,7 +691,10 @@ mod tests { star_count_aggregate!("aggregate_count") ])), ])) - .relationships([("students", relationship("students", [("_id", "classId")]))]) + .relationships([( + "students", + relationship("students", [("_id", &["classId"])]), + )]) .into(); let expected_response = row_set() @@ -800,6 +800,7 @@ mod tests { ndc_models::ExistsInCollection::Related { relationship: "movie".into(), arguments: Default::default(), + field_path: Default::default(), }, binop( "_eq", @@ -810,7 +811,7 @@ mod tests { ) .relationships([( "movie", - relationship("movies", [("movie_id", "_id")]).object_type(), + relationship("movies", [("movie_id", &["_id"])]).object_type(), )]) .into(); @@ -913,6 +914,7 @@ mod tests { ndc_models::ExistsInCollection::Related { relationship: "movie".into(), arguments: Default::default(), + field_path: Default::default(), }, binop( "_eq", @@ -921,7 +923,7 @@ mod tests { ), )), ) - .relationships([("movie", relationship("movies", [("movie_id", "_id")]))]) + .relationships([("movie", relationship("movies", [("movie_id", &["_id"])]))]) .into(); let expected_response: QueryResponse = row_set() diff --git a/crates/mongodb-agent-common/src/query/response.rs b/crates/mongodb-agent-common/src/query/response.rs index cec6f1b8..714b4559 100644 --- a/crates/mongodb-agent-common/src/query/response.rs +++ b/crates/mongodb-agent-common/src/query/response.rs @@ -12,8 +12,8 @@ use tracing::instrument; use crate::{ mongo_query_plan::{ - Aggregate, Field, NestedArray, NestedField, NestedObject, ObjectType, Query, QueryPlan, - Type, + Aggregate, Field, NestedArray, NestedField, NestedObject, ObjectField, ObjectType, Query, + QueryPlan, Type, }, query::serialization::{bson_to_json, BsonToJsonError}, }; @@ -106,6 +106,7 @@ fn serialize_row_set_rows_only( Ok(RowSet { aggregates: None, rows, + groups: None, // TODO: ENG-1486 implement group by }) } @@ -129,7 +130,11 @@ fn serialize_row_set_with_aggregates( .map(|fields| serialize_rows(mode, path, fields, row_set.rows)) .transpose()?; - Ok(RowSet { aggregates, rows }) + Ok(RowSet { + aggregates, + rows, + 
groups: None, // TODO: ENG-1486 implement group by + }) } fn serialize_aggregates( @@ -182,19 +187,31 @@ fn type_for_row_set( aggregates: &Option>, fields: &Option>, ) -> Result { - let mut type_fields = BTreeMap::new(); + let mut object_fields = BTreeMap::new(); if let Some(aggregates) = aggregates { - type_fields.insert("aggregates".into(), type_for_aggregates(aggregates)); + object_fields.insert( + "aggregates".into(), + ObjectField { + r#type: type_for_aggregates(aggregates), + parameters: Default::default(), + }, + ); } if let Some(query_fields) = fields { let row_type = type_for_row(path, query_fields)?; - type_fields.insert("rows".into(), Type::ArrayOf(Box::new(row_type))); + object_fields.insert( + "rows".into(), + ObjectField { + r#type: Type::ArrayOf(Box::new(row_type)), + parameters: Default::default(), + }, + ); } Ok(Type::Object(ObjectType { - fields: type_fields, + fields: object_fields, name: None, })) } @@ -203,16 +220,20 @@ fn type_for_aggregates(query_aggregates: &IndexMap { + Type::Scalar(MongoScalarType::Bson(mongodb_support::BsonScalarType::Int)) + } + Aggregate::StarCount => { + Type::Scalar(MongoScalarType::Bson(mongodb_support::BsonScalarType::Int)) + } + Aggregate::SingleColumn { result_type, .. } => result_type.clone(), + }; ( field_name.to_string().into(), - match aggregate { - Aggregate::ColumnCount { .. } => { - Type::Scalar(MongoScalarType::Bson(mongodb_support::BsonScalarType::Int)) - } - Aggregate::StarCount => { - Type::Scalar(MongoScalarType::Bson(mongodb_support::BsonScalarType::Int)) - } - Aggregate::SingleColumn { result_type, .. } => result_type.clone(), + ObjectField { + r#type: result_type, + parameters: Default::default(), }, ) }) @@ -231,7 +252,11 @@ fn type_for_row( &append_to_path(path, [field_name.as_str()]), field_definition, )?; - Ok((field_name.clone(), field_type)) + let object_field = ObjectField { + r#type: field_type, + parameters: Default::default(), + }; + Ok((field_name.clone(), object_field)) }) .try_collect::<_, _, QueryResponseError>()?; Ok(Type::Object(ObjectType { fields, name: None })) @@ -379,6 +404,7 @@ mod tests { })) )] .into()]), + groups: Default::default(), }]) ); Ok(()) @@ -417,6 +443,7 @@ mod tests { ])) )] .into()]), + groups: Default::default(), }]) ); Ok(()) @@ -473,6 +500,7 @@ mod tests { ) ] .into()]), + groups: Default::default(), }]) ); Ok(()) @@ -525,6 +553,7 @@ mod tests { ), ] .into()]), + groups: Default::default(), }]) ); Ok(()) @@ -588,6 +617,7 @@ mod tests { })) )] .into()]), + groups: Default::default(), }]) ); Ok(()) @@ -651,6 +681,7 @@ mod tests { })) )] .into()]), + groups: Default::default(), }]) ); Ok(()) @@ -661,7 +692,7 @@ mod tests { let collection_name = "appearances"; let request: QueryRequest = query_request() .collection(collection_name) - .relationships([("author", relationship("authors", [("authorId", "id")]))]) + .relationships([("author", relationship("authors", [("authorId", &["id"])]))]) .query( query().fields([relation_field!("presenter" => "author", query().fields([ field!("addr" => "address", object!([ @@ -686,45 +717,50 @@ mod tests { &query_plan.query.fields, )?; - let expected = Type::Object(ObjectType { - name: None, - fields: [ - ("rows".into(), Type::ArrayOf(Box::new(Type::Object(ObjectType { - name: None, - fields: [ - ("presenter".into(), Type::Object(ObjectType { - name: None, - fields: [ - ("rows".into(), Type::ArrayOf(Box::new(Type::Object(ObjectType { - name: None, - fields: [ - ("addr".into(), Type::Object(ObjectType { - name: None, - fields: [ - ("geocode".into(), 
Type::Nullable(Box::new(Type::Object(ObjectType { - name: None, - fields: [ - ("latitude".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::Double))), - ("long".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::Double))), - ].into(), - })))), - ("street".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::String))), - ].into(), - })), - ("articles".into(), Type::ArrayOf(Box::new(Type::Object(ObjectType { - name: None, - fields: [ - ("article_title".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::String))), - ].into(), - })))), - ].into(), - })))) - ].into(), - })) - ].into() - })))) - ].into(), - }); + let expected = Type::object([( + "rows", + Type::array_of(Type::Object(ObjectType::new([( + "presenter", + Type::object([( + "rows", + Type::array_of(Type::object([ + ( + "addr", + Type::object([ + ( + "geocode", + Type::nullable(Type::object([ + ( + "latitude", + Type::Scalar(MongoScalarType::Bson( + BsonScalarType::Double, + )), + ), + ( + "long", + Type::Scalar(MongoScalarType::Bson( + BsonScalarType::Double, + )), + ), + ])), + ), + ( + "street", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + ), + ]), + ), + ( + "articles", + Type::array_of(Type::object([( + "article_title", + Type::Scalar(MongoScalarType::Bson(BsonScalarType::String)), + )])), + ), + ])), + )]), + )]))), + )]); assert_eq!(row_set_type, expected); Ok(()) diff --git a/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs b/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs index ead29d93..05943140 100644 --- a/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs +++ b/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs @@ -71,7 +71,9 @@ fn bson_scalar_to_json( (BsonScalarType::Double, v) => convert_small_number(expected_type, v), (BsonScalarType::Int, v) => convert_small_number(expected_type, v), (BsonScalarType::Long, Bson::Int64(n)) => Ok(Value::String(n.to_string())), + (BsonScalarType::Long, Bson::Int32(n)) => Ok(Value::String(n.to_string())), (BsonScalarType::Decimal, Bson::Decimal128(n)) => Ok(Value::String(n.to_string())), + (BsonScalarType::Decimal, Bson::Double(n)) => Ok(Value::String(n.to_string())), (BsonScalarType::String, Bson::String(s)) => Ok(Value::String(s)), (BsonScalarType::Symbol, Bson::Symbol(s)) => Ok(Value::String(s)), (BsonScalarType::Date, Bson::DateTime(date)) => convert_date(date), @@ -230,16 +232,13 @@ mod tests { #[test] fn serializes_document_with_missing_nullable_field() -> anyhow::Result<()> { - let expected_type = Type::Object(ObjectType { - name: Some("test_object".into()), - fields: [( - "field".into(), - Type::Nullable(Box::new(Type::Scalar(MongoScalarType::Bson( - BsonScalarType::String, - )))), - )] - .into(), - }); + let expected_type = Type::named_object( + "test_object", + [( + "field", + Type::nullable(Type::Scalar(MongoScalarType::Bson(BsonScalarType::String))), + )], + ); let value = bson::doc! 
{}; let actual = bson_to_json(ExtendedJsonMode::Canonical, &expected_type, value.into())?; assert_eq!(actual, json!({})); diff --git a/crates/mongodb-agent-common/src/query/serialization/json_formats.rs b/crates/mongodb-agent-common/src/query/serialization/json_formats.rs index 9ab6c8d0..85a435f9 100644 --- a/crates/mongodb-agent-common/src/query/serialization/json_formats.rs +++ b/crates/mongodb-agent-common/src/query/serialization/json_formats.rs @@ -6,6 +6,25 @@ use mongodb::bson::{self, Bson}; use serde::{Deserialize, Serialize}; use serde_with::{base64::Base64, hex::Hex, serde_as}; +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum Either { + Left(T), + Right(U), +} + +impl Either { + pub fn into_left(self) -> T + where + T: From, + { + match self { + Either::Left(l) => l, + Either::Right(r) => r.into(), + } + } +} + #[serde_as] #[derive(Deserialize, Serialize)] #[serde(rename_all = "camelCase")] @@ -84,6 +103,15 @@ impl From for Regex { } } +impl From for Regex { + fn from(value: String) -> Self { + Regex { + pattern: value, + options: String::new(), + } + } +} + #[derive(Deserialize, Serialize)] pub struct Timestamp { t: u32, diff --git a/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs b/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs index 5dff0be0..dc866039 100644 --- a/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs +++ b/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs @@ -105,7 +105,11 @@ pub fn json_to_bson_scalar(expected_type: BsonScalarType, value: Value) -> Resul Value::Null => Bson::Undefined, _ => incompatible_scalar_type(BsonScalarType::Undefined, value)?, }, - BsonScalarType::Regex => deserialize::(expected_type, value)?.into(), + BsonScalarType::Regex => { + deserialize::>(expected_type, value)? 
+ .into_left() + .into() + } BsonScalarType::Javascript => Bson::JavaScriptCode(deserialize(expected_type, value)?), BsonScalarType::JavascriptWithScope => { deserialize::(expected_type, value)?.into() @@ -236,35 +240,32 @@ mod tests { use super::json_to_bson; + use BsonScalarType as S; + #[test] #[allow(clippy::approx_constant)] fn deserializes_specialized_scalar_types() -> anyhow::Result<()> { - let object_type = ObjectType { - name: Some("scalar_test".into()), - fields: [ - ("double", BsonScalarType::Double), - ("int", BsonScalarType::Int), - ("long", BsonScalarType::Long), - ("decimal", BsonScalarType::Decimal), - ("string", BsonScalarType::String), - ("date", BsonScalarType::Date), - ("timestamp", BsonScalarType::Timestamp), - ("binData", BsonScalarType::BinData), - ("objectId", BsonScalarType::ObjectId), - ("bool", BsonScalarType::Bool), - ("null", BsonScalarType::Null), - ("undefined", BsonScalarType::Undefined), - ("regex", BsonScalarType::Regex), - ("javascript", BsonScalarType::Javascript), - ("javascriptWithScope", BsonScalarType::JavascriptWithScope), - ("minKey", BsonScalarType::MinKey), - ("maxKey", BsonScalarType::MaxKey), - ("symbol", BsonScalarType::Symbol), - ] - .into_iter() - .map(|(name, t)| (name.into(), Type::Scalar(MongoScalarType::Bson(t)))) - .collect(), - }; + let object_type = ObjectType::new([ + ("double", Type::scalar(S::Double)), + ("int", Type::scalar(S::Int)), + ("long", Type::scalar(S::Long)), + ("decimal", Type::scalar(S::Decimal)), + ("string", Type::scalar(S::String)), + ("date", Type::scalar(S::Date)), + ("timestamp", Type::scalar(S::Timestamp)), + ("binData", Type::scalar(S::BinData)), + ("objectId", Type::scalar(S::ObjectId)), + ("bool", Type::scalar(S::Bool)), + ("null", Type::scalar(S::Null)), + ("undefined", Type::scalar(S::Undefined)), + ("regex", Type::scalar(S::Regex)), + ("javascript", Type::scalar(S::Javascript)), + ("javascriptWithScope", Type::scalar(S::JavascriptWithScope)), + ("minKey", Type::scalar(S::MinKey)), + ("maxKey", Type::scalar(S::MaxKey)), + ("symbol", Type::scalar(S::Symbol)), + ]) + .named("scalar_test"); let input = json!({ "double": 3.14159, @@ -367,16 +368,13 @@ mod tests { #[test] fn deserializes_object_with_missing_nullable_field() -> anyhow::Result<()> { - let expected_type = Type::Object(ObjectType { - name: Some("test_object".into()), - fields: [( - "field".into(), - Type::Nullable(Box::new(Type::Scalar(MongoScalarType::Bson( - BsonScalarType::String, - )))), - )] - .into(), - }); + let expected_type = Type::named_object( + "test_object", + [( + "field", + Type::nullable(Type::scalar(BsonScalarType::String)), + )], + ); let value = json!({}); let actual = json_to_bson(&expected_type, value)?; assert_eq!(actual, bson!({})); diff --git a/crates/mongodb-agent-common/src/scalar_types_capabilities.rs b/crates/mongodb-agent-common/src/scalar_types_capabilities.rs index ea7d2352..56b2fd35 100644 --- a/crates/mongodb-agent-common/src/scalar_types_capabilities.rs +++ b/crates/mongodb-agent-common/src/scalar_types_capabilities.rs @@ -38,19 +38,32 @@ fn extended_json_scalar_type() -> (ndc_models::ScalarTypeName, ScalarType) { ( mongodb_support::EXTENDED_JSON_TYPE_NAME.into(), ScalarType { - representation: Some(TypeRepresentation::JSON), + representation: TypeRepresentation::JSON, aggregate_functions: aggregation_functions .into_iter() .map(|aggregation_function| { + use AggregateFunctionDefinition as NDC; + use AggregationFunction as Plan; let name = aggregation_function.graphql_name().into(); - let result_type = match 
aggregation_function { - AggregationFunction::Avg => ext_json_type.clone(), - AggregationFunction::Count => bson_to_named_type(S::Int), - AggregationFunction::Min => ext_json_type.clone(), - AggregationFunction::Max => ext_json_type.clone(), - AggregationFunction::Sum => ext_json_type.clone(), + let definition = match aggregation_function { + // Using custom instead of standard aggregations because we want the result + // types to be ExtendedJSON instead of specific numeric types + Plan::Avg => NDC::Custom { + result_type: Type::Named { + name: mongodb_support::EXTENDED_JSON_TYPE_NAME.into(), + }, + }, + Plan::Count => NDC::Custom { + result_type: bson_to_named_type(S::Int), + }, + Plan::Min => NDC::Min, + Plan::Max => NDC::Max, + Plan::Sum => NDC::Custom { + result_type: Type::Named { + name: mongodb_support::EXTENDED_JSON_TYPE_NAME.into(), + }, + }, }; - let definition = AggregateFunctionDefinition { result_type }; (name, definition) }) .collect(), @@ -58,16 +71,22 @@ fn extended_json_scalar_type() -> (ndc_models::ScalarTypeName, ScalarType) { .into_iter() .map(|comparison_fn| { let name = comparison_fn.graphql_name().into(); - let definition = match comparison_fn { - C::Equal => ComparisonOperatorDefinition::Equal, - C::Regex | C::IRegex => ComparisonOperatorDefinition::Custom { - argument_type: bson_to_named_type(S::String), + let ndc_definition = comparison_fn.ndc_definition(|func| match func { + C::Equal => ext_json_type.clone(), + C::In => Type::Array { + element_type: Box::new(ext_json_type.clone()), }, - _ => ComparisonOperatorDefinition::Custom { - argument_type: ext_json_type.clone(), + C::LessThan => ext_json_type.clone(), + C::LessThanOrEqual => ext_json_type.clone(), + C::GreaterThan => ext_json_type.clone(), + C::GreaterThanOrEqual => ext_json_type.clone(), + C::NotEqual => ext_json_type.clone(), + C::NotIn => Type::Array { + element_type: Box::new(ext_json_type.clone()), }, - }; - (name, definition) + C::Regex | C::IRegex => bson_to_named_type(S::Regex), + }); + (name, ndc_definition) }) .collect(), }, @@ -84,27 +103,28 @@ fn make_scalar_type(bson_scalar_type: BsonScalarType) -> (ndc_models::ScalarType (scalar_type_name.into(), scalar_type) } -fn bson_scalar_type_representation(bson_scalar_type: BsonScalarType) -> Option { +fn bson_scalar_type_representation(bson_scalar_type: BsonScalarType) -> TypeRepresentation { + use TypeRepresentation as R; match bson_scalar_type { - BsonScalarType::Double => Some(TypeRepresentation::Float64), - BsonScalarType::Decimal => Some(TypeRepresentation::BigDecimal), // Not quite.... 
Mongo Decimal is 128-bit, BigDecimal is unlimited - BsonScalarType::Int => Some(TypeRepresentation::Int32), - BsonScalarType::Long => Some(TypeRepresentation::Int64), - BsonScalarType::String => Some(TypeRepresentation::String), - BsonScalarType::Date => Some(TypeRepresentation::Timestamp), // Mongo Date is milliseconds since unix epoch - BsonScalarType::Timestamp => None, // Internal Mongo timestamp type - BsonScalarType::BinData => None, - BsonScalarType::ObjectId => Some(TypeRepresentation::String), // Mongo ObjectId is usually expressed as a 24 char hex string (12 byte number) - BsonScalarType::Bool => Some(TypeRepresentation::Boolean), - BsonScalarType::Null => None, - BsonScalarType::Regex => None, - BsonScalarType::Javascript => None, - BsonScalarType::JavascriptWithScope => None, - BsonScalarType::MinKey => None, - BsonScalarType::MaxKey => None, - BsonScalarType::Undefined => None, - BsonScalarType::DbPointer => None, - BsonScalarType::Symbol => None, + S::Double => R::Float64, + S::Decimal => R::BigDecimal, // Not quite.... Mongo Decimal is 128-bit, BigDecimal is unlimited + S::Int => R::Int32, + S::Long => R::Int64, + S::String => R::String, + S::Date => R::TimestampTZ, // Mongo Date is milliseconds since unix epoch, but we serialize to JSON as an ISO string + S::Timestamp => R::JSON, // Internal Mongo timestamp type + S::BinData => R::JSON, + S::ObjectId => R::String, // Mongo ObjectId is usually expressed as a 24 char hex string (12 byte number) - not using R::Bytes because that expects base64 + S::Bool => R::Boolean, + S::Null => R::JSON, + S::Regex => R::JSON, + S::Javascript => R::String, + S::JavascriptWithScope => R::JSON, + S::MinKey => R::JSON, + S::MaxKey => R::JSON, + S::Undefined => R::JSON, + S::DbPointer => R::JSON, + S::Symbol => R::String, } } @@ -114,14 +134,7 @@ fn bson_comparison_operators( comparison_operators(bson_scalar_type) .map(|(comparison_fn, argument_type)| { let fn_name = comparison_fn.graphql_name().into(); - match comparison_fn { - ComparisonFunction::Equal => (fn_name, ComparisonOperatorDefinition::Equal), - ComparisonFunction::In => (fn_name, ComparisonOperatorDefinition::In), - _ => ( - fn_name, - ComparisonOperatorDefinition::Custom { argument_type }, - ), - } + (fn_name, comparison_fn.ndc_definition(|_| argument_type)) }) .collect() } @@ -130,8 +143,7 @@ fn bson_aggregation_functions( bson_scalar_type: BsonScalarType, ) -> BTreeMap { aggregate_functions(bson_scalar_type) - .map(|(fn_name, result_type)| { - let aggregation_definition = AggregateFunctionDefinition { result_type }; + .map(|(fn_name, aggregation_definition)| { (fn_name.graphql_name().into(), aggregation_definition) }) .collect() @@ -143,26 +155,47 @@ fn bson_to_named_type(bson_scalar_type: BsonScalarType) -> Type { } } -pub fn aggregate_functions( +fn bson_to_scalar_type_name(bson_scalar_type: BsonScalarType) -> ndc_models::ScalarTypeName { + bson_scalar_type.graphql_name().into() +} + +fn aggregate_functions( scalar_type: BsonScalarType, -) -> impl Iterator { - let nullable_scalar_type = move || Type::Nullable { - underlying_type: Box::new(bson_to_named_type(scalar_type)), - }; - [(A::Count, bson_to_named_type(S::Int))] - .into_iter() - .chain(iter_if( - scalar_type.is_orderable(), - [A::Min, A::Max] - .into_iter() - .map(move |op| (op, nullable_scalar_type())), - )) - .chain(iter_if( - scalar_type.is_numeric(), - [A::Avg, A::Sum] - .into_iter() - .map(move |op| (op, nullable_scalar_type())), - )) +) -> impl Iterator { + use AggregateFunctionDefinition as NDC; + [( + A::Count, + 
NDC::Custom { + result_type: bson_to_named_type(S::Int), + }, + )] + .into_iter() + .chain(iter_if( + scalar_type.is_orderable(), + [(A::Min, NDC::Min), (A::Max, NDC::Max)].into_iter(), + )) + .chain(iter_if( + scalar_type.is_numeric(), + [ + ( + A::Avg, + NDC::Average { + result_type: bson_to_scalar_type_name(S::Double), + }, + ), + ( + A::Sum, + NDC::Sum { + result_type: bson_to_scalar_type_name(if scalar_type.is_fractional() { + S::Double + } else { + S::Long + }), + }, + ), + ] + .into_iter(), + )) } pub fn comparison_operators( @@ -203,8 +236,8 @@ pub fn comparison_operators( .chain(match scalar_type { S::String => Box::new( [ - (C::Regex, bson_to_named_type(S::String)), - (C::IRegex, bson_to_named_type(S::String)), + (C::Regex, bson_to_named_type(S::Regex)), + (C::IRegex, bson_to_named_type(S::Regex)), ] .into_iter(), ), diff --git a/crates/mongodb-agent-common/src/test_helpers.rs b/crates/mongodb-agent-common/src/test_helpers.rs index c8cd2ccd..38f31651 100644 --- a/crates/mongodb-agent-common/src/test_helpers.rs +++ b/crates/mongodb-agent-common/src/test_helpers.rs @@ -20,7 +20,6 @@ pub fn make_nested_schema() -> MongoConfiguration { collection_type: "Author".into(), arguments: Default::default(), uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), - foreign_keys: Default::default(), }, ), collection("appearances"), // new helper gives more concise syntax @@ -87,6 +86,7 @@ pub fn make_nested_schema() -> MongoConfiguration { } /// Configuration for a MongoDB database with Chinook test data +#[allow(dead_code)] pub fn chinook_config() -> MongoConfiguration { MongoConfiguration(Configuration { collections: [ @@ -139,19 +139,20 @@ pub fn chinook_config() -> MongoConfiguration { }) } +#[allow(dead_code)] pub fn chinook_relationships() -> BTreeMap { [ ( "Albums", - ndc_test_helpers::relationship("Album", [("ArtistId", "ArtistId")]), + ndc_test_helpers::relationship("Album", [("ArtistId", &["ArtistId"])]), ), ( "Tracks", - ndc_test_helpers::relationship("Track", [("AlbumId", "AlbumId")]), + ndc_test_helpers::relationship("Track", [("AlbumId", &["AlbumId"])]), ), ( "Genre", - ndc_test_helpers::relationship("Genre", [("GenreId", "GenreId")]).object_type(), + ndc_test_helpers::relationship("Genre", [("GenreId", &["GenreId"])]).object_type(), ), ] .into_iter() diff --git a/crates/mongodb-connector/src/capabilities.rs b/crates/mongodb-connector/src/capabilities.rs index 8fc7cdf2..5ab5f8ea 100644 --- a/crates/mongodb-connector/src/capabilities.rs +++ b/crates/mongodb-connector/src/capabilities.rs @@ -1,21 +1,34 @@ use ndc_sdk::models::{ - Capabilities, ExistsCapabilities, LeafCapability, NestedFieldCapabilities, QueryCapabilities, - RelationshipCapabilities, + AggregateCapabilities, Capabilities, ExistsCapabilities, LeafCapability, + NestedArrayFilterByCapabilities, NestedFieldCapabilities, NestedFieldFilterByCapabilities, + QueryCapabilities, RelationshipCapabilities, }; pub fn mongo_capabilities() -> Capabilities { Capabilities { query: QueryCapabilities { - aggregates: Some(LeafCapability {}), + aggregates: Some(AggregateCapabilities { + filter_by: None, + group_by: None, + }), variables: Some(LeafCapability {}), explain: Some(LeafCapability {}), nested_fields: NestedFieldCapabilities { - filter_by: Some(LeafCapability {}), + filter_by: Some(NestedFieldFilterByCapabilities { + nested_arrays: Some(NestedArrayFilterByCapabilities { + contains: Some(LeafCapability {}), + is_empty: Some(LeafCapability {}), + }), + }), order_by: Some(LeafCapability {}), aggregates: 
Some(LeafCapability {}), + nested_collections: None, // TODO: ENG-1464 }, exists: ExistsCapabilities { + named_scopes: None, // TODO: ENG-1487 + unrelated: Some(LeafCapability {}), nested_collections: Some(LeafCapability {}), + nested_scalar_collections: None, // TODO: ENG-1488 }, }, mutation: ndc_sdk::models::MutationCapabilities { @@ -25,6 +38,7 @@ pub fn mongo_capabilities() -> Capabilities { relationships: Some(RelationshipCapabilities { relation_comparisons: Some(LeafCapability {}), order_by_aggregate: None, + nested: None, // TODO: ENG-1490 }), } } diff --git a/crates/mongodb-connector/src/mongo_connector.rs b/crates/mongodb-connector/src/mongo_connector.rs index 3545621f..648b5548 100644 --- a/crates/mongodb-connector/src/mongo_connector.rs +++ b/crates/mongodb-connector/src/mongo_connector.rs @@ -31,7 +31,7 @@ impl ConnectorSetup for MongoConnector { #[instrument(err, skip_all)] async fn parse_configuration( &self, - configuration_dir: impl AsRef + Send, + configuration_dir: &Path, ) -> connector::Result { let configuration = Configuration::parse_configuration(configuration_dir) .await diff --git a/crates/mongodb-connector/src/schema.rs b/crates/mongodb-connector/src/schema.rs index 1e92d403..bdc922f5 100644 --- a/crates/mongodb-connector/src/schema.rs +++ b/crates/mongodb-connector/src/schema.rs @@ -1,6 +1,7 @@ use mongodb_agent_common::{ mongo_query_plan::MongoConfiguration, scalar_types_capabilities::SCALAR_TYPES, }; +use mongodb_support::BsonScalarType; use ndc_query_plan::QueryContext as _; use ndc_sdk::{connector, models as ndc}; @@ -20,6 +21,13 @@ pub async fn get_schema(config: &MongoConfiguration) -> connector::Result bool { + match self { + S::Double => true, + S::Decimal => true, + S::Int => false, + S::Long => false, + S::String => false, + S::Date => false, + S::Timestamp => false, + S::BinData => false, + S::ObjectId => false, + S::Bool => false, + S::Null => false, + S::Regex => false, + S::Javascript => false, + S::JavascriptWithScope => false, + S::MinKey => false, + S::MaxKey => false, + S::Undefined => false, + S::DbPointer => false, + S::Symbol => false, + } + } + pub fn is_comparable(self) -> bool { match self { S::Double => true, diff --git a/crates/ndc-query-plan/Cargo.toml b/crates/ndc-query-plan/Cargo.toml index 732640c9..63ab6865 100644 --- a/crates/ndc-query-plan/Cargo.toml +++ b/crates/ndc-query-plan/Cargo.toml @@ -9,7 +9,7 @@ indent = "^0.1" indexmap = { workspace = true } itertools = { workspace = true } ndc-models = { workspace = true } -nonempty = "^0.10" +nonempty = { workspace = true } serde_json = { workspace = true } thiserror = "1" ref-cast = { workspace = true } diff --git a/crates/ndc-query-plan/src/lib.rs b/crates/ndc-query-plan/src/lib.rs index 725ba0cd..3af97eca 100644 --- a/crates/ndc-query-plan/src/lib.rs +++ b/crates/ndc-query-plan/src/lib.rs @@ -12,4 +12,4 @@ pub use plan_for_query_request::{ type_annotated_field::{type_annotated_field, type_annotated_nested_field}, }; pub use query_plan::*; -pub use type_system::{inline_object_types, ObjectType, Type}; +pub use type_system::{inline_object_types, ObjectField, ObjectType, Type}; diff --git a/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs b/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs index e88e0a2b..e8503f07 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use ndc_models as ndc; +use ndc_models::{self as ndc}; use 
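The new `is_fractional` predicate above exists to pick result types for the standard `sum` and `avg` aggregations defined in `aggregate_functions`. A hedged sketch of that rule (the function name is illustrative, not part of the crate): `avg` always reports a double for numeric columns, while `sum` reports a double for fractional inputs and a long for integral ones.

```rust
use mongodb_support::BsonScalarType as S;

// Illustrative only: the scalar type reported for a `sum` aggregation, following the
// `is_numeric` / `is_fractional` branches used in `aggregate_functions` above.
fn sum_result_scalar(column_type: S) -> Option<S> {
    if !column_type.is_numeric() {
        None // sum is only offered for numeric columns
    } else if column_type.is_fractional() {
        Some(S::Double) // double and decimal columns sum to a double
    } else {
        Some(S::Long) // int and long columns sum to a long
    }
}
```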
crate::{self as plan}; @@ -11,7 +11,7 @@ type Result = std::result::Result; pub fn find_object_field<'a, S>( object_type: &'a plan::ObjectType, field_name: &ndc::FieldName, -) -> Result<&'a plan::Type> { +) -> Result<&'a plan::ObjectField> { object_type.fields.get(field_name).ok_or_else(|| { QueryPlanError::UnknownObjectTypeField { object_type: object_type.name.clone(), @@ -21,28 +21,29 @@ pub fn find_object_field<'a, S>( }) } -pub fn find_object_field_path<'a, S>( +pub fn get_object_field_by_path<'a, S>( object_type: &'a plan::ObjectType, field_name: &ndc::FieldName, - field_path: Option<&Vec>, -) -> Result<&'a plan::Type> { + field_path: Option<&[ndc::FieldName]>, +) -> Result<&'a plan::ObjectField> { match field_path { None => find_object_field(object_type, field_name), - Some(field_path) => find_object_field_path_helper(object_type, field_name, field_path), + Some(field_path) => get_object_field_by_path_helper(object_type, field_name, field_path), } } -fn find_object_field_path_helper<'a, S>( +fn get_object_field_by_path_helper<'a, S>( object_type: &'a plan::ObjectType, field_name: &ndc::FieldName, field_path: &[ndc::FieldName], -) -> Result<&'a plan::Type> { - let field_type = find_object_field(object_type, field_name)?; +) -> Result<&'a plan::ObjectField> { + let object_field = find_object_field(object_type, field_name)?; + let field_type = &object_field.r#type; match field_path { - [] => Ok(field_type), + [] => Ok(object_field), [nested_field_name, rest @ ..] => { let o = find_object_type(field_type, &object_type.name, field_name)?; - find_object_field_path_helper(o, nested_field_name, rest) + get_object_field_by_path_helper(o, nested_field_name, rest) } } } @@ -68,35 +69,41 @@ fn find_object_type<'a, S>( } } -/// Given the type of a collection and a field path returns the object type of the nested object at -/// that path. +/// Given the type of a collection and a field path returns the type of the nested values in an +/// array field at that path. pub fn find_nested_collection_type( collection_object_type: plan::ObjectType, field_path: &[ndc::FieldName], -) -> Result> +) -> Result> where - S: Clone, + S: Clone + std::fmt::Debug, { - fn normalize_object_type( - field_path: &[ndc::FieldName], - t: plan::Type, - ) -> Result> { - match t { - plan::Type::Object(t) => Ok(t), - plan::Type::ArrayOf(t) => normalize_object_type(field_path, *t), - plan::Type::Nullable(t) => normalize_object_type(field_path, *t), - _ => Err(QueryPlanError::ExpectedObject { - path: field_path.iter().map(|f| f.to_string()).collect(), - }), + let nested_field = match field_path { + [field_name] => get_object_field_by_path(&collection_object_type, field_name, None), + [field_name, rest_of_path @ ..] => { + get_object_field_by_path(&collection_object_type, field_name, Some(rest_of_path)) } - } + [] => Err(QueryPlanError::UnknownCollection(field_path.join("."))), + }?; + let element_type = nested_field.r#type.clone().into_array_element_type()?; + Ok(element_type) +} - field_path - .iter() - .try_fold(collection_object_type, |obj_type, field_name| { - let field_type = find_object_field(&obj_type, field_name)?.clone(); - normalize_object_type(field_path, field_type) - }) +/// Given the type of a collection and a field path returns the object type of the nested object at +/// that path. +/// +/// This function differs from [find_nested_collection_type] in that it this one returns +/// [plan::ObjectType] instead of [plan::Type], and returns an error if the nested type is not an +/// object type. 
+pub fn find_nested_collection_object_type<S>(
+    collection_object_type: plan::ObjectType<S>,
+    field_path: &[ndc::FieldName],
+) -> Result<plan::ObjectType<S>>
+where
+    S: Clone + std::fmt::Debug,
+{
+    let collection_element_type = find_nested_collection_type(collection_object_type, field_path)?;
+    collection_element_type.into_object_type()
 }
 
 pub fn lookup_relationship<'a>(
@@ -107,45 +114,3 @@ pub fn lookup_relationship<'a>(
         .get(relationship)
         .ok_or_else(|| QueryPlanError::UnspecifiedRelation(relationship.to_owned()))
 }
-
-/// Special case handling for array comparisons! Normally we assume that the right operand of Equal
-/// is the same type as the left operand. BUT MongoDB allows comparing arrays to scalar values in
-/// which case the condition passes if any array element is equal to the given scalar value. So
-/// this function needs to return a scalar type if the user is expecting array-to-scalar
-/// comparison, or an array type if the user is expecting array-to-array comparison. Or if the
-/// column does not have an array type we fall back to the default assumption that the value type
-/// should be the same as the column type.
-///
-/// For now this assumes that if the column has an array type, the value type is a scalar type.
-/// That's the simplest option since we don't support array-to-array comparisons yet.
-///
-/// TODO: When we do support array-to-array comparisons we will need to either:
-///
-/// - input the [ndc::ComparisonValue] into this function, and any query request variables; check
-/// that the given JSON value or variable values are not array values, and if so assume the value
-/// type should be a scalar type
-/// - or get the GraphQL Engine to include a type with [ndc::ComparisonValue] in which case we can
-/// use that as the value type
-///
-/// It is important that queries behave the same when given an inline value or variables. So we
-/// can't just check the value of an [ndc::ComparisonValue::Scalar], and punt on an
-/// [ndc::ComparisonValue::Variable] input. The latter requires accessing query request variables,
-/// and it will take a little more work to thread those through the code to make them available
-/// here.
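A short sketch of how the reworked helpers fit together, assuming the struct shapes shown in this diff and the `ScalarType` from the crate's test helpers; the `Post`/`comments` schema below is hypothetical. `get_object_field_by_path` resolves a column (optionally through a nested field path) to an `ObjectField`, `find_nested_collection_type` returns the element type of an array field at a path, and `find_nested_collection_object_type` additionally insists that those elements are objects.

```rust
use ndc_models as ndc;

// Illustrative only: a document type with a "comments" field that is an array of objects.
fn nested_collection_example() -> Result<(), QueryPlanError> {
    let comment_type = plan::ObjectType {
        name: None,
        fields: [(
            "text".into(),
            plan::ObjectField {
                r#type: plan::Type::Scalar(ScalarType::String),
                parameters: Default::default(),
            },
        )]
        .into(),
    };
    let post_type = plan::ObjectType {
        name: Some("Post".into()),
        fields: [(
            "comments".into(),
            plan::ObjectField {
                r#type: plan::Type::ArrayOf(Box::new(plan::Type::Object(comment_type))),
                parameters: Default::default(),
            },
        )]
        .into(),
    };

    let comments_path: Vec<ndc::FieldName> = vec!["comments".into()];

    // Field lookup now returns the whole ObjectField, keeping parameters next to the type.
    let comments_field = get_object_field_by_path(&post_type, &comments_path[0], None)?;
    assert!(matches!(comments_field.r#type, plan::Type::ArrayOf(_)));

    // Element type of the array field at that path...
    let _element_type = find_nested_collection_type(post_type.clone(), &comments_path)?;
    // ...and the same lookup, additionally requiring the elements to be objects.
    let _comment_object_type = find_nested_collection_object_type(post_type, &comments_path)?;
    Ok(())
}
```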
-pub fn value_type_in_possible_array_equality_comparison( - column_type: plan::Type, -) -> plan::Type -where - S: Clone, -{ - match column_type { - plan::Type::ArrayOf(t) => *t, - plan::Type::Nullable(t) => match *t { - v @ plan::Type::ArrayOf(_) => { - value_type_in_possible_array_equality_comparison(v.clone()) - } - t => plan::Type::Nullable(Box::new(t)), - }, - _ => column_type, - } -} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/mod.rs b/crates/ndc-query-plan/src/plan_for_query_request/mod.rs index 1faa0045..71020d93 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/mod.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/mod.rs @@ -15,17 +15,20 @@ mod tests; use std::{collections::VecDeque, iter::once}; use crate::{self as plan, type_annotated_field, ObjectType, QueryPlan, Scope}; -use helpers::{find_nested_collection_type, value_type_in_possible_array_equality_comparison}; +use helpers::find_nested_collection_type; use indexmap::IndexMap; use itertools::Itertools; use ndc::{ExistsInCollection, QueryRequest}; -use ndc_models as ndc; +use ndc_models::{self as ndc}; use query_plan_state::QueryPlanInfo; pub use self::plan_for_mutation_request::plan_for_mutation_request; use self::{ - helpers::{find_object_field, find_object_field_path, lookup_relationship}, - plan_for_arguments::plan_for_arguments, + helpers::{ + find_nested_collection_object_type, find_object_field, get_object_field_by_path, + lookup_relationship, + }, + plan_for_arguments::{plan_arguments_from_plan_parameters, plan_for_arguments}, query_context::QueryContext, query_plan_error::QueryPlanError, query_plan_state::QueryPlanState, @@ -100,7 +103,7 @@ pub fn plan_for_query( let mut plan_state = plan_state.state_for_subquery(); let aggregates = - plan_for_aggregates(plan_state.context, collection_object_type, query.aggregates)?; + plan_for_aggregates(&mut plan_state, collection_object_type, query.aggregates)?; let fields = plan_for_fields( &mut plan_state, root_collection_object_type, @@ -149,7 +152,7 @@ pub fn plan_for_query( } fn plan_for_aggregates( - context: &T, + plan_state: &mut QueryPlanState<'_, T>, collection_object_type: &plan::ObjectType, ndc_aggregates: Option>, ) -> Result>>> { @@ -160,7 +163,7 @@ fn plan_for_aggregates( .map(|(name, aggregate)| { Ok(( name, - plan_for_aggregate(context, collection_object_type, aggregate)?, + plan_for_aggregate(plan_state, collection_object_type, aggregate)?, )) }) .collect() @@ -169,32 +172,50 @@ fn plan_for_aggregates( } fn plan_for_aggregate( - context: &T, + plan_state: &mut QueryPlanState<'_, T>, collection_object_type: &plan::ObjectType, aggregate: ndc::Aggregate, ) -> Result> { match aggregate { ndc::Aggregate::ColumnCount { column, + arguments, distinct, field_path, - } => Ok(plan::Aggregate::ColumnCount { - column, - field_path, - distinct, - }), + } => { + let object_field = collection_object_type.get(&column)?; + let plan_arguments = plan_arguments_from_plan_parameters( + plan_state, + &object_field.parameters, + arguments, + )?; + Ok(plan::Aggregate::ColumnCount { + column, + arguments: plan_arguments, + distinct, + field_path, + }) + } ndc::Aggregate::SingleColumn { column, + arguments, function, field_path, } => { - let object_type_field_type = - find_object_field_path(collection_object_type, &column, field_path.as_ref())?; - // let column_scalar_type_name = get_scalar_type_name(&object_type_field.r#type)?; - let (function, definition) = - context.find_aggregation_function_definition(object_type_field_type, &function)?; + let 
nested_object_field = + get_object_field_by_path(collection_object_type, &column, field_path.as_deref())?; + let object_field = collection_object_type.get(&column)?; + let plan_arguments = plan_arguments_from_plan_parameters( + plan_state, + &object_field.parameters, + arguments, + )?; + let (function, definition) = plan_state + .context + .find_aggregation_function_definition(&nested_object_field.r#type, &function)?; Ok(plan::Aggregate::SingleColumn { column, + arguments: plan_arguments, field_path, function, result_type: definition.result_type.clone(), @@ -260,55 +281,126 @@ fn plan_for_order_by_element( ) -> Result> { let target = match element.target { ndc::OrderByTarget::Column { + path, name, + arguments, field_path, + } => { + let (relationship_names, collection_object_type) = plan_for_relationship_path( + plan_state, + root_collection_object_type, + object_type, + path, + vec![name.clone()], + )?; + let object_field = collection_object_type.get(&name)?; + + let plan_arguments = plan_arguments_from_plan_parameters( + plan_state, + &object_field.parameters, + arguments, + )?; + + plan::OrderByTarget::Column { + path: relationship_names, + name: name.clone(), + arguments: plan_arguments, + field_path, + } + } + ndc::OrderByTarget::Aggregate { path, - } => plan::OrderByTarget::Column { - name: name.clone(), - field_path, - path: plan_for_relationship_path( + aggregate: + ndc::Aggregate::ColumnCount { + column, + arguments, + field_path, + distinct, + }, + } => { + let (plan_path, collection_object_type) = plan_for_relationship_path( plan_state, root_collection_object_type, object_type, path, - vec![name], - )? - .0, - }, - ndc::OrderByTarget::SingleColumnAggregate { - column, - function, + vec![], // TODO: ENG-1019 propagate requested aggregate to relationship query + )?; + + let object_field = collection_object_type.get(&column)?; + + let plan_arguments = plan_arguments_from_plan_parameters( + plan_state, + &object_field.parameters, + arguments, + )?; + + plan::OrderByTarget::Aggregate { + path: plan_path, + aggregate: plan::Aggregate::ColumnCount { + column, + arguments: plan_arguments, + field_path, + distinct, + }, + } + } + ndc::OrderByTarget::Aggregate { path, - field_path: _, + aggregate: + ndc::Aggregate::SingleColumn { + column, + arguments, + field_path, + function, + }, } => { - let (plan_path, target_object_type) = plan_for_relationship_path( + let (plan_path, collection_object_type) = plan_for_relationship_path( plan_state, root_collection_object_type, object_type, path, - vec![], // TODO: MDB-156 propagate requested aggregate to relationship query + vec![], // TODO: ENG-1019 propagate requested aggregate to relationship query + )?; + + let object_field = collection_object_type.get(&column)?; + + let plan_arguments = plan_arguments_from_plan_parameters( + plan_state, + &object_field.parameters, + arguments, )?; - let column_type = find_object_field(&target_object_type, &column)?; + + let object_field = find_object_field(&collection_object_type, &column)?; let (function, function_definition) = plan_state .context - .find_aggregation_function_definition(column_type, &function)?; + .find_aggregation_function_definition(&object_field.r#type, &function)?; - plan::OrderByTarget::SingleColumnAggregate { - column, - function, - result_type: function_definition.result_type.clone(), + plan::OrderByTarget::Aggregate { path: plan_path, + aggregate: plan::Aggregate::SingleColumn { + column, + arguments: plan_arguments, + field_path, + function, + result_type: 
function_definition.result_type.clone(), + }, } } - ndc::OrderByTarget::StarCountAggregate { path } => { + ndc::OrderByTarget::Aggregate { + path, + aggregate: ndc::Aggregate::StarCount {}, + } => { let (plan_path, _) = plan_for_relationship_path( plan_state, root_collection_object_type, object_type, path, - vec![], // TODO: MDB-157 propagate requested aggregate to relationship query + vec![], // TODO: ENG-1019 propagate requested aggregate to relationship query )?; - plan::OrderByTarget::StarCountAggregate { path: plan_path } + plan::OrderByTarget::Aggregate { + path: plan_path, + aggregate: plan::Aggregate::StarCount, + } } }; @@ -374,6 +466,7 @@ fn plan_for_relationship_path_helper( let is_last = tail.is_empty(); let ndc::PathElement { + field_path: _, // TODO: ENG-1458 support nested relationships relationship, arguments, predicate, @@ -392,14 +485,14 @@ fn plan_for_relationship_path_helper( let fields = requested_columns .into_iter() .map(|column_name| { - let column_type = + let object_field = find_object_field(&related_collection_type, &column_name)?.clone(); Ok(( column_name.clone(), plan::Field::Column { column: column_name, fields: None, - column_type, + column_type: object_field.r#type, }, )) }) @@ -475,12 +568,7 @@ fn plan_for_expression( }), ndc::Expression::UnaryComparisonOperator { column, operator } => { Ok(plan::Expression::UnaryComparisonOperator { - column: plan_for_comparison_target( - plan_state, - root_collection_object_type, - object_type, - column, - )?, + column: plan_for_comparison_target(plan_state, object_type, column)?, operator, }) } @@ -496,6 +584,13 @@ fn plan_for_expression( operator, value, ), + ndc::Expression::ArrayComparison { column, comparison } => plan_for_array_comparison( + plan_state, + root_collection_object_type, + object_type, + column, + comparison, + ), ndc::Expression::Exists { in_collection, predicate, @@ -516,21 +611,11 @@ fn plan_for_binary_comparison( operator: ndc::ComparisonOperatorName, value: ndc::ComparisonValue, ) -> Result> { - let comparison_target = - plan_for_comparison_target(plan_state, root_collection_object_type, object_type, column)?; + let comparison_target = plan_for_comparison_target(plan_state, object_type, column)?; let (operator, operator_definition) = plan_state .context - .find_comparison_operator(comparison_target.get_field_type(), &operator)?; - let value_type = match operator_definition { - plan::ComparisonOperatorDefinition::Equal => { - let column_type = comparison_target.get_field_type().clone(); - value_type_in_possible_array_equality_comparison(column_type) - } - plan::ComparisonOperatorDefinition::In => { - plan::Type::ArrayOf(Box::new(comparison_target.get_field_type().clone())) - } - plan::ComparisonOperatorDefinition::Custom { argument_type } => argument_type.clone(), - }; + .find_comparison_operator(comparison_target.target_type(), &operator)?; + let value_type = operator_definition.argument_type(comparison_target.target_type()); Ok(plan::Expression::BinaryComparisonOperator { operator, value: plan_for_comparison_value( @@ -544,44 +629,67 @@ fn plan_for_binary_comparison( }) } -fn plan_for_comparison_target( +fn plan_for_array_comparison( plan_state: &mut QueryPlanState<'_, T>, root_collection_object_type: &plan::ObjectType, object_type: &plan::ObjectType, + column: ndc::ComparisonTarget, + comparison: ndc::ArrayComparison, +) -> Result> { + let comparison_target = plan_for_comparison_target(plan_state, object_type, column)?; + let plan_comparison = match comparison { + ndc::ArrayComparison::Contains { 
value } => { + let array_element_type = comparison_target + .target_type() + .clone() + .into_array_element_type()?; + let value = plan_for_comparison_value( + plan_state, + root_collection_object_type, + object_type, + array_element_type, + value, + )?; + plan::ArrayComparison::Contains { value } + } + ndc::ArrayComparison::IsEmpty => plan::ArrayComparison::IsEmpty, + }; + Ok(plan::Expression::ArrayComparison { + column: comparison_target, + comparison: plan_comparison, + }) +} + +fn plan_for_comparison_target( + plan_state: &mut QueryPlanState<'_, T>, + object_type: &plan::ObjectType, target: ndc::ComparisonTarget, ) -> Result> { match target { ndc::ComparisonTarget::Column { name, + arguments, field_path, - path, } => { - let requested_columns = vec![name.clone()]; - let (path, target_object_type) = plan_for_relationship_path( + let object_field = + get_object_field_by_path(object_type, &name, field_path.as_deref())?.clone(); + let plan_arguments = plan_arguments_from_plan_parameters( plan_state, - root_collection_object_type, - object_type, - path, - requested_columns, + &object_field.parameters, + arguments, )?; - let field_type = - find_object_field_path(&target_object_type, &name, field_path.as_ref())?.clone(); Ok(plan::ComparisonTarget::Column { name, + arguments: plan_arguments, field_path, - path, - field_type, + field_type: object_field.r#type, }) } - ndc::ComparisonTarget::RootCollectionColumn { name, field_path } => { - let field_type = - find_object_field_path(root_collection_object_type, &name, field_path.as_ref())?.clone(); - Ok(plan::ComparisonTarget::ColumnInScope { - name, - field_path, - field_type, - scope: plan_state.scope.clone(), - }) + ndc::ComparisonTarget::Aggregate { .. } => { + // TODO: ENG-1457 implement query.aggregates.filter_by + Err(QueryPlanError::NotImplemented( + "filter by aggregate".to_string(), + )) } } } @@ -594,14 +702,35 @@ fn plan_for_comparison_value( value: ndc::ComparisonValue, ) -> Result> { match value { - ndc::ComparisonValue::Column { column } => Ok(plan::ComparisonValue::Column { - column: plan_for_comparison_target( + ndc::ComparisonValue::Column { + path, + name, + arguments, + field_path, + scope, + } => { + let (plan_path, collection_object_type) = plan_for_relationship_path( plan_state, root_collection_object_type, object_type, - column, - )?, - }), + path, + vec![name.clone()], + )?; + let object_field = collection_object_type.get(&name)?; + let plan_arguments = plan_arguments_from_plan_parameters( + plan_state, + &object_field.parameters, + arguments, + )?; + Ok(plan::ComparisonValue::Column { + path: plan_path, + name, + arguments: plan_arguments, + field_path, + field_type: object_field.r#type.clone(), + scope, + }) + } ndc::ComparisonValue::Scalar { value } => Ok(plan::ComparisonValue::Scalar { value, value_type: expected_type, @@ -628,6 +757,7 @@ fn plan_for_exists( ndc::ExistsInCollection::Related { relationship, arguments, + field_path: _, // TODO: ENG-1490 requires propagating this, probably through the `register_relationship` call } => { let ndc_relationship = lookup_relationship(plan_state.collection_relationships, &relationship)?; @@ -646,19 +776,28 @@ fn plan_for_exists( }) .transpose()?; + // TODO: ENG-1457 When we implement query.aggregates.filter_by we'll need to collect aggregates + // here as well as fields. 
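For reference, a sketch of the two request-side array comparisons handled above, built only from the `ndc_models` constructors that appear in this diff; the `tags` column and the collection it belongs to are hypothetical. A `Contains` comparison gets its value operand typed as the element type of the array column (via `into_array_element_type`), while `IsEmpty` carries no value at all.

```rust
use ndc_models as ndc;

// Hypothetical predicates over an array-of-strings document field named "tags".
fn example_array_predicates() -> (ndc::Expression, ndc::Expression) {
    let tags_column = || ndc::ComparisonTarget::Column {
        name: "tags".into(),
        arguments: Default::default(),
        field_path: None,
    };

    // ... WHERE the "tags" array contains the scalar "mongodb"
    let contains = ndc::Expression::ArrayComparison {
        column: tags_column(),
        comparison: ndc::ArrayComparison::Contains {
            value: ndc::ComparisonValue::Scalar {
                value: serde_json::json!("mongodb"),
            },
        },
    };

    // ... WHERE the "tags" array is empty - no value operand needed
    let is_empty = ndc::Expression::ArrayComparison {
        column: tags_column(),
        comparison: ndc::ArrayComparison::IsEmpty,
    };

    (contains, is_empty)
}
```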
let fields = predicate.as_ref().map(|p| { - p.query_local_comparison_targets() - .map(|comparison_target| { - ( - comparison_target.column_name().to_owned(), + let mut fields = IndexMap::new(); + for comparison_target in p.query_local_comparison_targets() { + match comparison_target.into_owned() { + plan::ComparisonTarget::Column { + name, + arguments: _, + field_type, + .. + } => fields.insert( + name.clone(), plan::Field::Column { - column: comparison_target.column_name().clone(), - column_type: comparison_target.get_field_type().clone(), + column: name, fields: None, + column_type: field_type, }, - ) - }) - .collect() + ), + }; + } + fields }); let relationship_query = plan::Query { @@ -713,18 +852,52 @@ fn plan_for_exists( arguments, field_path, } => { - let arguments = if arguments.is_empty() { - Default::default() - } else { - Err(QueryPlanError::NotImplemented( - "arguments on nested fields".to_string(), - ))? + let object_field = root_collection_object_type.get(&column_name)?; + let plan_arguments = plan_arguments_from_plan_parameters( + &mut nested_state, + &object_field.parameters, + arguments, + )?; + + let nested_collection_type = find_nested_collection_object_type( + root_collection_object_type.clone(), + &field_path + .clone() + .into_iter() + .chain(once(column_name.clone())) + .collect_vec(), + )?; + + let in_collection = plan::ExistsInCollection::NestedCollection { + column_name, + arguments: plan_arguments, + field_path, }; - // To support field arguments here we need a way to look up field parameters (a map of - // supported argument names to types). When we have that replace the above `arguments` - // assignment with this one: - // let arguments = plan_for_arguments(plan_state, parameters, arguments)?; + let predicate = predicate + .map(|expression| { + plan_for_expression( + &mut nested_state, + root_collection_object_type, + &nested_collection_type, + *expression, + ) + }) + .transpose()?; + + Ok((in_collection, predicate)) + } + ExistsInCollection::NestedScalarCollection { + column_name, + arguments, + field_path, + } => { + let object_field = root_collection_object_type.get(&column_name)?; + let plan_arguments = plan_arguments_from_plan_parameters( + &mut nested_state, + &object_field.parameters, + arguments, + )?; let nested_collection_type = find_nested_collection_type( root_collection_object_type.clone(), @@ -735,9 +908,21 @@ fn plan_for_exists( .collect_vec(), )?; - let in_collection = plan::ExistsInCollection::NestedCollection { + let virtual_object_type = plan::ObjectType { + name: None, + fields: [( + "__value".into(), + plan::ObjectField { + r#type: nested_collection_type, + parameters: Default::default(), + }, + )] + .into(), + }; + + let in_collection = plan::ExistsInCollection::NestedScalarCollection { column_name, - arguments, + arguments: plan_arguments, field_path, }; @@ -746,7 +931,7 @@ fn plan_for_exists( plan_for_expression( &mut nested_state, root_collection_object_type, - &nested_collection_type, + &virtual_object_type, *expression, ) }) diff --git a/crates/ndc-query-plan/src/plan_for_query_request/plan_for_arguments.rs b/crates/ndc-query-plan/src/plan_for_query_request/plan_for_arguments.rs index 6f485448..b15afb1c 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/plan_for_arguments.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/plan_for_arguments.rs @@ -44,7 +44,7 @@ pub fn plan_for_mutation_procedure_arguments( ) } -/// Convert maps of [ndc::Argument] values to maps of [plan::Argument] +/// Convert maps of 
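The `__value` wrapper above is worth a request-side illustration. When an exists clause targets a nested collection of scalars (say an array of strings), the planner wraps the element type in a single-field virtual object so the predicate can be planned like any other object predicate. A hedged sketch, with hypothetical column and operator names:

```rust
use ndc_models as ndc;

// Hypothetical: EXISTS over the nested scalar collection at document field "tags",
// keeping documents where some element equals "mongodb". The planner types the elements
// as the virtual object `{ "__value": <element type> }` built above, so the predicate
// refers to the "__value" column.
fn example_nested_scalar_exists() -> ndc::Expression {
    ndc::Expression::Exists {
        in_collection: ndc::ExistsInCollection::NestedScalarCollection {
            column_name: "tags".into(),
            arguments: Default::default(),
            field_path: vec![],
        },
        predicate: Some(Box::new(ndc::Expression::BinaryComparisonOperator {
            column: ndc::ComparisonTarget::Column {
                name: "__value".into(),
                arguments: Default::default(),
                field_path: None,
            },
            operator: "_eq".into(), // hypothetical operator name
            value: ndc::ComparisonValue::Scalar {
                value: serde_json::json!("mongodb"),
            },
        })),
    }
}
```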
[ndc::RelationshipArgument] values to maps of [plan::RelationshipArgument] pub fn plan_for_relationship_arguments( plan_state: &mut QueryPlanState<'_, T>, parameters: &BTreeMap, @@ -70,17 +70,54 @@ pub fn plan_for_relationship_arguments( Ok(arguments) } +/// Create a map of plan arguments when we already have plan types for parameters. +pub fn plan_arguments_from_plan_parameters( + plan_state: &mut QueryPlanState<'_, T>, + parameters: &BTreeMap>, + arguments: BTreeMap, +) -> Result>> { + let arguments = plan_for_arguments_generic( + plan_state, + parameters, + arguments, + |_plan_state, plan_type, argument| match argument { + ndc::Argument::Variable { name } => Ok(plan::Argument::Variable { + name, + argument_type: plan_type.clone(), + }), + ndc::Argument::Literal { value } => Ok(plan::Argument::Literal { + value, + argument_type: plan_type.clone(), + }), + }, + )?; + + for argument in arguments.values() { + if let plan::Argument::Variable { + name, + argument_type, + } = argument + { + plan_state.register_variable_use(name, argument_type.clone()) + } + } + + Ok(arguments) +} + fn plan_for_argument( plan_state: &mut QueryPlanState<'_, T>, - parameter_type: &ndc::Type, + argument_info: &ndc::ArgumentInfo, argument: ndc::Argument, ) -> Result> { match argument { ndc::Argument::Variable { name } => Ok(plan::Argument::Variable { name, - argument_type: plan_state.context.ndc_to_plan_type(parameter_type)?, + argument_type: plan_state + .context + .ndc_to_plan_type(&argument_info.argument_type)?, }), - ndc::Argument::Literal { value } => match parameter_type { + ndc::Argument::Literal { value } => match &argument_info.argument_type { ndc::Type::Predicate { object_type_name } => Ok(plan::Argument::Predicate { expression: plan_for_predicate(plan_state, object_type_name, value)?, }), @@ -94,10 +131,10 @@ fn plan_for_argument( fn plan_for_mutation_procedure_argument( plan_state: &mut QueryPlanState<'_, T>, - parameter_type: &ndc::Type, + argument_info: &ndc::ArgumentInfo, value: serde_json::Value, ) -> Result> { - match parameter_type { + match &argument_info.argument_type { ndc::Type::Predicate { object_type_name } => { Ok(plan::MutationProcedureArgument::Predicate { expression: plan_for_predicate(plan_state, object_type_name, value)?, @@ -112,19 +149,20 @@ fn plan_for_mutation_procedure_argument( fn plan_for_relationship_argument( plan_state: &mut QueryPlanState<'_, T>, - parameter_type: &ndc::Type, + argument_info: &ndc::ArgumentInfo, argument: ndc::RelationshipArgument, ) -> Result> { + let argument_type = &argument_info.argument_type; match argument { ndc::RelationshipArgument::Variable { name } => Ok(plan::RelationshipArgument::Variable { name, - argument_type: plan_state.context.ndc_to_plan_type(parameter_type)?, + argument_type: plan_state.context.ndc_to_plan_type(argument_type)?, }), ndc::RelationshipArgument::Column { name } => Ok(plan::RelationshipArgument::Column { name, - argument_type: plan_state.context.ndc_to_plan_type(parameter_type)?, + argument_type: plan_state.context.ndc_to_plan_type(argument_type)?, }), - ndc::RelationshipArgument::Literal { value } => match parameter_type { + ndc::RelationshipArgument::Literal { value } => match argument_type { ndc::Type::Predicate { object_type_name } => { Ok(plan::RelationshipArgument::Predicate { expression: plan_for_predicate(plan_state, object_type_name, value)?, @@ -151,19 +189,19 @@ fn plan_for_predicate( /// Convert maps of [ndc::Argument] or [ndc::RelationshipArgument] values to [plan::Argument] or /// [plan::RelationshipArgument] 
respectively. -fn plan_for_arguments_generic( +fn plan_for_arguments_generic( plan_state: &mut QueryPlanState<'_, T>, - parameters: &BTreeMap, + parameters: &BTreeMap, mut arguments: BTreeMap, convert_argument: F, ) -> Result> where - F: Fn(&mut QueryPlanState<'_, T>, &ndc::Type, NdcArgument) -> Result, + F: Fn(&mut QueryPlanState<'_, T>, &Parameter, NdcArgument) -> Result, { validate_no_excess_arguments(parameters, &arguments)?; let (arguments, missing): ( - Vec<(ndc::ArgumentName, NdcArgument, &ndc::ArgumentInfo)>, + Vec<(ndc::ArgumentName, NdcArgument, &Parameter)>, Vec, ) = parameters .iter() @@ -185,7 +223,7 @@ where ) = arguments .into_iter() .map(|(name, argument, argument_info)| { - match convert_argument(plan_state, &argument_info.argument_type, argument) { + match convert_argument(plan_state, argument_info, argument) { Ok(argument) => Ok((name, argument)), Err(err) => Err((name, err)), } @@ -198,8 +236,8 @@ where Ok(resolved) } -pub fn validate_no_excess_arguments( - parameters: &BTreeMap, +pub fn validate_no_excess_arguments( + parameters: &BTreeMap, arguments: &BTreeMap, ) -> Result<()> { let excess: Vec = arguments diff --git a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/mod.rs b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/mod.rs index 8518fd90..8f5895af 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/mod.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/mod.rs @@ -15,11 +15,10 @@ use ndc_test_helpers::{ use crate::{ConnectorTypes, QueryContext, QueryPlanError, Type}; -#[allow(unused_imports)] pub use self::{ - query::{query, QueryBuilder}, - relationships::{relationship, RelationshipBuilder}, - type_helpers::{date, double, int, object_type, string}, + query::QueryBuilder, + relationships::relationship, + type_helpers::{date, double, int, string}, }; #[derive(Clone, Debug, Default)] @@ -34,6 +33,14 @@ impl ConnectorTypes for TestContext { type AggregateFunction = AggregateFunction; type ComparisonOperator = ComparisonOperator; type ScalarType = ScalarType; + + fn count_aggregate_type() -> Type { + int() + } + + fn string_type() -> Type { + string() + } } impl QueryContext for TestContext { @@ -173,13 +180,11 @@ fn scalar_types() -> BTreeMap { ( ScalarType::Double.name().to_owned(), ndc::ScalarType { - representation: Some(TypeRepresentation::Float64), + representation: TypeRepresentation::Float64, aggregate_functions: [( AggregateFunction::Average.name().into(), - ndc::AggregateFunctionDefinition { - result_type: ndc::Type::Named { - name: ScalarType::Double.name().into(), - }, + ndc::AggregateFunctionDefinition::Average { + result_type: ScalarType::Double.name().into(), }, )] .into(), @@ -193,13 +198,11 @@ fn scalar_types() -> BTreeMap { ( ScalarType::Int.name().to_owned(), ndc::ScalarType { - representation: Some(TypeRepresentation::Int32), + representation: TypeRepresentation::Int32, aggregate_functions: [( AggregateFunction::Average.name().into(), - ndc::AggregateFunctionDefinition { - result_type: ndc::Type::Named { - name: ScalarType::Double.name().into(), - }, + ndc::AggregateFunctionDefinition::Average { + result_type: ScalarType::Double.name().into(), }, )] .into(), @@ -213,7 +216,7 @@ fn scalar_types() -> BTreeMap { ( ScalarType::String.name().to_owned(), ndc::ScalarType { - representation: Some(TypeRepresentation::String), + representation: TypeRepresentation::String, aggregate_functions: Default::default(), comparison_operators: [ ( @@ -249,7 +252,6 @@ pub fn 
make_flat_schema() -> TestContext { collection_type: "Author".into(), arguments: Default::default(), uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), - foreign_keys: Default::default(), }, ), ( @@ -260,7 +262,6 @@ pub fn make_flat_schema() -> TestContext { collection_type: "Article".into(), arguments: Default::default(), uniqueness_constraints: make_primary_key_uniqueness_constraint("articles"), - foreign_keys: Default::default(), }, ), ]), @@ -297,7 +298,6 @@ pub fn make_nested_schema() -> TestContext { collection_type: "Author".into(), arguments: Default::default(), uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), - foreign_keys: Default::default(), }, ), collection("appearances"), // new helper gives more concise syntax diff --git a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/relationships.rs b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/relationships.rs index 0ab7cfbd..ab8f3226 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/relationships.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/relationships.rs @@ -1,6 +1,7 @@ use std::collections::BTreeMap; -use ndc_models::RelationshipType; +use ndc_models::{FieldName, RelationshipType}; +use nonempty::NonEmpty; use crate::{ConnectorTypes, Field, Relationship, RelationshipArgument}; @@ -8,7 +9,7 @@ use super::QueryBuilder; #[derive(Clone, Debug)] pub struct RelationshipBuilder { - column_mapping: BTreeMap, + column_mapping: BTreeMap>, relationship_type: RelationshipType, target_collection: ndc_models::CollectionName, arguments: BTreeMap>, @@ -42,11 +43,22 @@ impl RelationshipBuilder { pub fn column_mapping( mut self, - column_mapping: impl IntoIterator, + column_mapping: impl IntoIterator< + Item = ( + impl Into, + impl IntoIterator>, + ), + >, ) -> Self { self.column_mapping = column_mapping .into_iter() - .map(|(source, target)| (source.to_string().into(), target.to_string().into())) + .map(|(source, target)| { + ( + source.into(), + NonEmpty::collect(target.into_iter().map(Into::into)) + .expect("target path in relationship column mapping may not be empty"), + ) + }) .collect(); self } diff --git a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/type_helpers.rs b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/type_helpers.rs index 7d0dc453..05875471 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/type_helpers.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers/type_helpers.rs @@ -1,4 +1,4 @@ -use crate::{ObjectType, Type}; +use crate::Type; use super::ScalarType; @@ -17,15 +17,3 @@ pub fn int() -> Type { pub fn string() -> Type { Type::Scalar(ScalarType::String) } - -pub fn object_type( - fields: impl IntoIterator>)>, -) -> Type { - Type::Object(ObjectType { - name: None, - fields: fields - .into_iter() - .map(|(name, field)| (name.to_string().into(), field.into())) - .collect(), - }) -} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs b/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs index 64a947e1..eb180b43 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs @@ -54,11 +54,32 @@ pub trait QueryContext: ConnectorTypes { Ok(( func, plan::AggregateFunctionDefinition { - result_type: self.ndc_to_plan_type(&definition.result_type)?, + result_type: 
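The `NonEmpty` column mapping above reflects the ndc-spec v0.2 change that a relationship's join key on the target side is a path of fields rather than a single column, the same shape the updated `relationship("Album", [("ArtistId", &["ArtistId"])])` test-helper calls in this diff pass in. A sketch of building such a mapping directly; the collection and field names are hypothetical.

```rust
use std::collections::BTreeMap;

use ndc_models::FieldName;
use nonempty::NonEmpty;

// Hypothetical: join source column "customer_id" to the nested target field
// "billing._id" on the related collection.
fn example_column_mapping() -> BTreeMap<FieldName, NonEmpty<FieldName>> {
    [(
        FieldName::from("customer_id"),
        NonEmpty::collect(["billing", "_id"].into_iter().map(FieldName::from))
            .expect("target path must not be empty"),
    )]
    .into()
}
```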
self.aggregate_function_result_type(definition, input_type)?, }, )) } + fn aggregate_function_result_type( + &self, + definition: &ndc::AggregateFunctionDefinition, + input_type: &plan::Type, + ) -> Result> { + let t = match definition { + ndc::AggregateFunctionDefinition::Min => input_type.clone().into_nullable(), + ndc::AggregateFunctionDefinition::Max => input_type.clone().into_nullable(), + ndc::AggregateFunctionDefinition::Sum { result_type } + | ndc::AggregateFunctionDefinition::Average { result_type } => { + let scalar_type = Self::lookup_scalar_type(result_type) + .ok_or_else(|| QueryPlanError::UnknownScalarType(result_type.clone()))?; + plan::Type::Scalar(scalar_type).into_nullable() + } + ndc::AggregateFunctionDefinition::Custom { result_type } => { + self.ndc_to_plan_type(result_type)? + } + }; + Ok(t) + } + fn find_comparison_operator( &self, left_operand_type: &Type, @@ -72,15 +93,10 @@ pub trait QueryContext: ConnectorTypes { { let (operator, definition) = Self::lookup_comparison_operator(self, left_operand_type, op_name)?; - let plan_def = match definition { - ndc::ComparisonOperatorDefinition::Equal => plan::ComparisonOperatorDefinition::Equal, - ndc::ComparisonOperatorDefinition::In => plan::ComparisonOperatorDefinition::In, - ndc::ComparisonOperatorDefinition::Custom { argument_type } => { - plan::ComparisonOperatorDefinition::Custom { - argument_type: self.ndc_to_plan_type(argument_type)?, - } - } - }; + let plan_def = + plan::ComparisonOperatorDefinition::from_ndc_definition(definition, |ndc_type| { + self.ndc_to_plan_type(ndc_type) + })?; Ok((operator, plan_def)) } diff --git a/crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs index 4467f802..2283ed1f 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs @@ -29,6 +29,11 @@ pub enum QueryPlanError { #[error("not implemented: {}", .0)] NotImplemented(String), + #[error("relationship, {relationship_name}, has an empty target path")] + RelationshipEmptyTarget { + relationship_name: ndc::RelationshipName, + }, + #[error("{0}")] RelationshipUnification(#[from] RelationshipUnificationError), diff --git a/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs index d82e5183..89ccefb7 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs @@ -5,6 +5,7 @@ use std::{ }; use ndc_models as ndc; +use nonempty::NonEmpty; use crate::{ plan_for_query_request::helpers::lookup_relationship, @@ -96,8 +97,23 @@ impl QueryPlanState<'_, T> { Default::default() }; + let column_mapping = ndc_relationship + .column_mapping + .iter() + .map(|(source, target_path)| { + Ok(( + source.clone(), + NonEmpty::collect(target_path.iter().cloned()).ok_or_else(|| { + QueryPlanError::RelationshipEmptyTarget { + relationship_name: ndc_relationship_name.clone(), + } + })?, + )) + }) + .collect::>>()?; + let relationship = Relationship { - column_mapping: ndc_relationship.column_mapping.clone(), + column_mapping, relationship_type: ndc_relationship.relationship_type, target_collection: ndc_relationship.target_collection.clone(), arguments, diff --git a/crates/ndc-query-plan/src/plan_for_query_request/tests.rs b/crates/ndc-query-plan/src/plan_for_query_request/tests.rs index 
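To make the rule in `aggregate_function_result_type` concrete, here is a sketch of the expected outcomes, written against the `TestContext` and scalar types from this crate's plan_test_helpers (illustrative, not a verbatim test): `min` and `max` return the input type made nullable, `sum` and `average` return the scalar named by the ndc definition made nullable, and `Custom` falls back to converting the declared ndc type.

```rust
use ndc_models as ndc;

// Illustrative only, assuming the plan_test_helpers TestContext from this diff.
fn result_type_examples(context: &TestContext) -> Result<(), QueryPlanError> {
    let input = plan::Type::Scalar(ScalarType::Double);

    // min / max echo the input type, made nullable
    let min_type = context
        .aggregate_function_result_type(&ndc::AggregateFunctionDefinition::Min, &input)?;
    assert_eq!(min_type, input.clone().into_nullable());

    // average resolves the scalar named in the ndc definition, made nullable
    let avg_type = context.aggregate_function_result_type(
        &ndc::AggregateFunctionDefinition::Average {
            result_type: ScalarType::Double.name().into(),
        },
        &input,
    )?;
    assert_eq!(avg_type, plan::Type::Scalar(ScalarType::Double).into_nullable());

    Ok(())
}
```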
d6ae2409..a9a4f17a 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/tests.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/tests.rs @@ -1,507 +1,517 @@ use ndc_models::{self as ndc, OrderByTarget, OrderDirection, RelationshipType}; use ndc_test_helpers::*; +use nonempty::NonEmpty; use pretty_assertions::assert_eq; -use serde_json::json; use crate::{ self as plan, - plan_for_query_request::plan_test_helpers::{ - self, make_flat_schema, make_nested_schema, TestContext, - }, - query_plan::UnrelatedJoin, - ExistsInCollection, Expression, Field, OrderBy, Query, QueryContext, QueryPlan, Relationship, + plan_for_query_request::plan_test_helpers::{self, make_flat_schema, make_nested_schema}, + QueryContext, QueryPlan, }; use super::plan_for_query_request; -#[test] -fn translates_query_request_relationships() -> Result<(), anyhow::Error> { - let request = query_request() - .collection("schools") - .relationships([ - ( - "school_classes", - relationship("classes", [("_id", "school_id")]), - ), - ( - "class_students", - relationship("students", [("_id", "class_id")]), - ), - ( - "class_department", - relationship("departments", [("department_id", "_id")]).object_type(), - ), - ( - "school_directory", - relationship("directory", [("_id", "school_id")]).object_type(), - ), - ( - "student_advisor", - relationship("advisors", [("advisor_id", "_id")]).object_type(), - ), - ( - "existence_check", - relationship("some_collection", [("some_id", "_id")]), - ), - ]) - .query( - query() - .fields([relation_field!("class_name" => "school_classes", query() - .fields([ - relation_field!("student_name" => "class_students") - ]) - )]) - .order_by(vec![ndc::OrderByElement { - order_direction: OrderDirection::Asc, - target: OrderByTarget::Column { - name: "advisor_name".into(), - field_path: None, - path: vec![ - path_element("school_classes".into()) - .predicate(binop( - "Equal", - target!( - "_id", - relations: [ - // path_element("school_classes"), - path_element("class_department".into()), - ], - ), - column_value!( - "math_department_id", - relations: [path_element("school_directory".into())], - ), - )) - .into(), - path_element("class_students".into()).into(), - path_element("student_advisor".into()).into(), - ], - }, - }]) - // The `And` layer checks that we properly recursive into Expressions - .predicate(and([ndc::Expression::Exists { - in_collection: related!("existence_check"), - predicate: None, - }])), - ) - .into(); +// TODO: ENG-1487 we need named scopes to define this query in ndc-spec 0.2 +// #[test] +// fn translates_query_request_relationships() -> Result<(), anyhow::Error> { +// let request = query_request() +// .collection("schools") +// .relationships([ +// ( +// "school_classes", +// relationship("classes", [("_id", &["school_id"])]), +// ), +// ( +// "class_students", +// relationship("students", [("_id", &["class_id"])]), +// ), +// ( +// "class_department", +// relationship("departments", [("department_id", &["_id"])]).object_type(), +// ), +// ( +// "school_directory", +// relationship("directory", [("_id", &["school_id"])]).object_type(), +// ), +// ( +// "student_advisor", +// relationship("advisors", [("advisor_id", &["_id"])]).object_type(), +// ), +// ( +// "existence_check", +// relationship("some_collection", [("some_id", &["_id"])]), +// ), +// ]) +// .query( +// query() +// .fields([relation_field!("class_name" => "school_classes", query() +// .fields([ +// relation_field!("student_name" => "class_students") +// ]) +// )]) +// .order_by(vec![ndc::OrderByElement 
{ +// order_direction: OrderDirection::Asc, +// target: OrderByTarget::Column { +// name: "advisor_name".into(), +// arguments: Default::default(), +// field_path: None, +// path: vec![ +// path_element("school_classes") +// .predicate( +// exists( +// in_related("class_department"), +// binop( +// "Equal", +// target!("_id"), +// column_value("math_department_id") +// .path([path_element("school_directory")]) +// .scope(2) +// .into() +// ), +// ) +// ) +// .into(), +// path_element("class_students").into(), +// path_element("student_advisor").into(), +// ], +// }, +// }]) +// // The `And` layer checks that we properly recurse into Expressions +// .predicate(and([ndc::Expression::Exists { +// in_collection: related!("existence_check"), +// predicate: None, +// }])), +// ) +// .into(); +// +// let expected = QueryPlan { +// collection: "schools".into(), +// arguments: Default::default(), +// variables: None, +// variable_types: Default::default(), +// unrelated_collections: Default::default(), +// query: Query { +// predicate: Some(Expression::And { +// expressions: vec![Expression::Exists { +// in_collection: ExistsInCollection::Related { +// relationship: "existence_check".into(), +// }, +// predicate: None, +// }], +// }), +// order_by: Some(OrderBy { +// elements: [plan::OrderByElement { +// order_direction: OrderDirection::Asc, +// target: plan::OrderByTarget::Column { +// name: "advisor_name".into(), +// arguments: Default::default(), +// field_path: Default::default(), +// path: [ +// "school_classes_0".into(), +// "class_students".into(), +// "student_advisor".into(), +// ] +// .into(), +// }, +// }] +// .into(), +// }), +// relationships: [ +// // We join on the school_classes relationship twice. This one is for the `order_by` +// // comparison in the top-level request query +// ( +// "school_classes_0".into(), +// Relationship { +// column_mapping: [("_id".into(), vec!["school_id".into()])].into(), +// relationship_type: RelationshipType::Array, +// target_collection: "classes".into(), +// arguments: Default::default(), +// query: Query { +// predicate: Some(Expression::Exists { +// in_collection: ExistsInCollection::Related { +// relationship: "school_directory".into(), +// }, +// predicate: Some(Box::new(plan::Expression::BinaryComparisonOperator { +// column: plan::ComparisonTarget::Column { +// name: "_id".into(), +// arguments: Default::default(), +// field_path: None, +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::Int, +// ), +// }, +// operator: plan_test_helpers::ComparisonOperator::Equal, +// value: plan::ComparisonValue::Column { +// name: "math_department_id".into(), +// arguments: Default::default(), +// field_path: None, +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::Int, +// ), +// path: vec!["school_directory".into()], +// scope: Default::default(), +// }, +// })) +// }), +// relationships: [( +// "class_department".into(), +// plan::Relationship { +// target_collection: "departments".into(), +// column_mapping: [("department_id".into(), vec!["_id".into()])].into(), +// relationship_type: RelationshipType::Object, +// arguments: Default::default(), +// query: plan::Query { +// fields: Some([ +// ("_id".into(), plan::Field::Column { column: "_id".into(), fields: None, column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Int) }) +// ].into()), +// ..Default::default() +// }, +// }, +// ), ( +// "class_students".into(), +// plan::Relationship { +// target_collection: "students".into(), +// column_mapping: 
[("_id".into(), vec!["class_id".into()])].into(), +// relationship_type: RelationshipType::Array, +// arguments: Default::default(), +// query: plan::Query { +// relationships: [( +// "student_advisor".into(), +// plan::Relationship { +// column_mapping: [( +// "advisor_id".into(), +// vec!["_id".into()], +// )] +// .into(), +// relationship_type: RelationshipType::Object, +// target_collection: "advisors".into(), +// arguments: Default::default(), +// query: plan::Query { +// fields: Some( +// [( +// "advisor_name".into(), +// plan::Field::Column { +// column: "advisor_name".into(), +// fields: None, +// column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), +// }, +// )] +// .into(), +// ), +// ..Default::default() +// }, +// }, +// )] +// .into(), +// ..Default::default() +// }, +// }, +// ), +// ( +// "school_directory".into(), +// Relationship { +// target_collection: "directory".into(), +// column_mapping: [("_id".into(), vec!["school_id".into()])].into(), +// relationship_type: RelationshipType::Object, +// arguments: Default::default(), +// query: Query { +// fields: Some([ +// ("math_department_id".into(), plan::Field::Column { column: "math_department_id".into(), fields: None, column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Int) }) +// ].into()), +// ..Default::default() +// }, +// }, +// ), +// ] +// .into(), +// ..Default::default() +// }, +// }, +// ), +// // This is the second join on school_classes - this one provides the relationship +// // field for the top-level request query +// ( +// "school_classes".into(), +// Relationship { +// column_mapping: [("_id".into(), vec!["school_id".into()])].into(), +// relationship_type: RelationshipType::Array, +// target_collection: "classes".into(), +// arguments: Default::default(), +// query: Query { +// fields: Some( +// [( +// "student_name".into(), +// plan::Field::Relationship { +// relationship: "class_students".into(), +// aggregates: None, +// fields: None, +// }, +// )] +// .into(), +// ), +// relationships: [( +// "class_students".into(), +// plan::Relationship { +// target_collection: "students".into(), +// column_mapping: [("_id".into(), vec!["class_id".into()])].into(), +// relationship_type: RelationshipType::Array, +// arguments: Default::default(), +// query: Query { +// scope: Some(plan::Scope::Named("scope_1".into())), +// ..Default::default() +// }, +// }, +// )].into(), +// scope: Some(plan::Scope::Named("scope_0".into())), +// ..Default::default() +// }, +// }, +// ), +// ( +// "existence_check".into(), +// Relationship { +// column_mapping: [("some_id".into(), vec!["_id".into()])].into(), +// relationship_type: RelationshipType::Array, +// target_collection: "some_collection".into(), +// arguments: Default::default(), +// query: Query { +// predicate: None, +// ..Default::default() +// }, +// }, +// ), +// ] +// .into(), +// fields: Some( +// [( +// "class_name".into(), +// Field::Relationship { +// relationship: "school_classes".into(), +// aggregates: None, +// fields: Some( +// [( +// "student_name".into(), +// Field::Relationship { +// relationship: "class_students".into(), +// aggregates: None, +// fields: None, +// }, +// )] +// .into(), +// ), +// }, +// )] +// .into(), +// ), +// scope: Some(plan::Scope::Root), +// ..Default::default() +// }, +// }; +// +// let context = TestContext { +// collections: [ +// collection("schools"), +// collection("classes"), +// collection("students"), +// collection("departments"), +// collection("directory"), +// collection("advisors"), +// 
collection("some_collection"), +// ] +// .into(), +// object_types: [ +// ("schools".into(), object_type([("_id", named_type("Int"))])), +// ( +// "classes".into(), +// object_type([ +// ("_id", named_type("Int")), +// ("school_id", named_type("Int")), +// ("department_id", named_type("Int")), +// ]), +// ), +// ( +// "students".into(), +// object_type([ +// ("_id", named_type("Int")), +// ("class_id", named_type("Int")), +// ("advisor_id", named_type("Int")), +// ("student_name", named_type("String")), +// ]), +// ), +// ( +// "departments".into(), +// object_type([("_id", named_type("Int"))]), +// ), +// ( +// "directory".into(), +// object_type([ +// ("_id", named_type("Int")), +// ("school_id", named_type("Int")), +// ("math_department_id", named_type("Int")), +// ]), +// ), +// ( +// "advisors".into(), +// object_type([ +// ("_id", named_type("Int")), +// ("advisor_name", named_type("String")), +// ]), +// ), +// ( +// "some_collection".into(), +// object_type([("_id", named_type("Int")), ("some_id", named_type("Int"))]), +// ), +// ] +// .into(), +// ..Default::default() +// }; +// +// let query_plan = plan_for_query_request(&context, request)?; +// +// assert_eq!(query_plan, expected); +// Ok(()) +// } - let expected = QueryPlan { - collection: "schools".into(), - arguments: Default::default(), - variables: None, - variable_types: Default::default(), - unrelated_collections: Default::default(), - query: Query { - predicate: Some(Expression::And { - expressions: vec![Expression::Exists { - in_collection: ExistsInCollection::Related { - relationship: "existence_check".into(), - }, - predicate: None, - }], - }), - order_by: Some(OrderBy { - elements: [plan::OrderByElement { - order_direction: OrderDirection::Asc, - target: plan::OrderByTarget::Column { - name: "advisor_name".into(), - field_path: Default::default(), - path: [ - "school_classes_0".into(), - "class_students".into(), - "student_advisor".into(), - ] - .into(), - }, - }] - .into(), - }), - relationships: [ - ( - "school_classes_0".into(), - Relationship { - column_mapping: [("_id".into(), "school_id".into())].into(), - relationship_type: RelationshipType::Array, - target_collection: "classes".into(), - arguments: Default::default(), - query: Query { - predicate: Some(plan::Expression::BinaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "_id".into(), - field_path: None, - field_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::Int, - ), - path: vec!["class_department".into()], - }, - operator: plan_test_helpers::ComparisonOperator::Equal, - value: plan::ComparisonValue::Column { - column: plan::ComparisonTarget::Column { - name: "math_department_id".into(), - field_path: None, - field_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::Int, - ), - path: vec!["school_directory".into()], - }, - }, - }), - relationships: [( - "class_department".into(), - plan::Relationship { - target_collection: "departments".into(), - column_mapping: [("department_id".into(), "_id".into())].into(), - relationship_type: RelationshipType::Object, - arguments: Default::default(), - query: plan::Query { - fields: Some([ - ("_id".into(), plan::Field::Column { column: "_id".into(), fields: None, column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Int) }) - ].into()), - ..Default::default() - }, - }, - ), ( - "class_students".into(), - plan::Relationship { - target_collection: "students".into(), - column_mapping: [("_id".into(), "class_id".into())].into(), - relationship_type: 
RelationshipType::Array, - arguments: Default::default(), - query: plan::Query { - relationships: [( - "student_advisor".into(), - plan::Relationship { - column_mapping: [( - "advisor_id".into(), - "_id".into(), - )] - .into(), - relationship_type: RelationshipType::Object, - target_collection: "advisors".into(), - arguments: Default::default(), - query: plan::Query { - fields: Some( - [( - "advisor_name".into(), - plan::Field::Column { - column: "advisor_name".into(), - fields: None, - column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), - }, - )] - .into(), - ), - ..Default::default() - }, - }, - )] - .into(), - ..Default::default() - }, - }, - ), - ( - "school_directory".into(), - Relationship { - target_collection: "directory".into(), - column_mapping: [("_id".into(), "school_id".into())].into(), - relationship_type: RelationshipType::Object, - arguments: Default::default(), - query: Query { - fields: Some([ - ("math_department_id".into(), plan::Field::Column { column: "math_department_id".into(), fields: None, column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Int) }) - ].into()), - ..Default::default() - }, - }, - ), - ] - .into(), - ..Default::default() - }, - }, - ), - ( - "school_classes".into(), - Relationship { - column_mapping: [("_id".into(), "school_id".into())].into(), - relationship_type: RelationshipType::Array, - target_collection: "classes".into(), - arguments: Default::default(), - query: Query { - fields: Some( - [( - "student_name".into(), - plan::Field::Relationship { - relationship: "class_students".into(), - aggregates: None, - fields: None, - }, - )] - .into(), - ), - relationships: [( - "class_students".into(), - plan::Relationship { - target_collection: "students".into(), - column_mapping: [("_id".into(), "class_id".into())].into(), - relationship_type: RelationshipType::Array, - arguments: Default::default(), - query: Query { - scope: Some(plan::Scope::Named("scope_1".into())), - ..Default::default() - }, - }, - )].into(), - scope: Some(plan::Scope::Named("scope_0".into())), - ..Default::default() - }, - }, - ), - ( - "existence_check".into(), - Relationship { - column_mapping: [("some_id".into(), "_id".into())].into(), - relationship_type: RelationshipType::Array, - target_collection: "some_collection".into(), - arguments: Default::default(), - query: Query { - predicate: None, - ..Default::default() - }, - }, - ), - ] - .into(), - fields: Some( - [( - "class_name".into(), - Field::Relationship { - relationship: "school_classes".into(), - aggregates: None, - fields: Some( - [( - "student_name".into(), - Field::Relationship { - relationship: "class_students".into(), - aggregates: None, - fields: None, - }, - )] - .into(), - ), - }, - )] - .into(), - ), - scope: Some(plan::Scope::Root), - ..Default::default() - }, - }; +// TODO: ENG-1487 update this test to use named scopes instead of root column reference - let context = TestContext { - collections: [ - collection("schools"), - collection("classes"), - collection("students"), - collection("departments"), - collection("directory"), - collection("advisors"), - collection("some_collection"), - ] - .into(), - object_types: [ - ("schools".into(), object_type([("_id", named_type("Int"))])), - ( - "classes".into(), - object_type([ - ("_id", named_type("Int")), - ("school_id", named_type("Int")), - ("department_id", named_type("Int")), - ]), - ), - ( - "students".into(), - object_type([ - ("_id", named_type("Int")), - ("class_id", named_type("Int")), - ("advisor_id", named_type("Int")), - 
("student_name", named_type("String")), - ]), - ), - ( - "departments".into(), - object_type([("_id", named_type("Int"))]), - ), - ( - "directory".into(), - object_type([ - ("_id", named_type("Int")), - ("school_id", named_type("Int")), - ("math_department_id", named_type("Int")), - ]), - ), - ( - "advisors".into(), - object_type([ - ("_id", named_type("Int")), - ("advisor_name", named_type("String")), - ]), - ), - ( - "some_collection".into(), - object_type([("_id", named_type("Int")), ("some_id", named_type("Int"))]), - ), - ] - .into(), - ..Default::default() - }; - - let query_plan = plan_for_query_request(&context, request)?; - - assert_eq!(query_plan, expected); - Ok(()) -} - -#[test] -fn translates_root_column_references() -> Result<(), anyhow::Error> { - let query_context = make_flat_schema(); - let query = query_request() - .collection("authors") - .query(query().fields([field!("last_name")]).predicate(exists( - unrelated!("articles"), - and([ - binop("Equal", target!("author_id"), column_value!(root("id"))), - binop("Regex", target!("title"), value!("Functional.*")), - ]), - ))) - .into(); - let query_plan = plan_for_query_request(&query_context, query)?; - - let expected = QueryPlan { - collection: "authors".into(), - query: plan::Query { - predicate: Some(plan::Expression::Exists { - in_collection: plan::ExistsInCollection::Unrelated { - unrelated_collection: "__join_articles_0".into(), - }, - predicate: Some(Box::new(plan::Expression::And { - expressions: vec![ - plan::Expression::BinaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "author_id".into(), - field_path: Default::default(), - field_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Int), - path: Default::default(), - }, - operator: plan_test_helpers::ComparisonOperator::Equal, - value: plan::ComparisonValue::Column { - column: plan::ComparisonTarget::ColumnInScope { - name: "id".into(), - field_path: Default::default(), - field_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::Int, - ), - scope: plan::Scope::Root, - }, - }, - }, - plan::Expression::BinaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "title".into(), - field_path: Default::default(), - field_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::String, - ), - path: Default::default(), - }, - operator: plan_test_helpers::ComparisonOperator::Regex, - value: plan::ComparisonValue::Scalar { - value: json!("Functional.*"), - value_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::String, - ), - }, - }, - ], - })), - }), - fields: Some( - [( - "last_name".into(), - plan::Field::Column { - column: "last_name".into(), - fields: None, - column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), - }, - )] - .into(), - ), - scope: Some(plan::Scope::Root), - ..Default::default() - }, - unrelated_collections: [( - "__join_articles_0".into(), - UnrelatedJoin { - target_collection: "articles".into(), - arguments: Default::default(), - query: plan::Query { - predicate: Some(plan::Expression::And { - expressions: vec![ - plan::Expression::BinaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "author_id".into(), - field_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::Int, - ), - field_path: None, - path: vec![], - }, - operator: plan_test_helpers::ComparisonOperator::Equal, - value: plan::ComparisonValue::Column { - column: plan::ComparisonTarget::ColumnInScope { - name: "id".into(), - scope: plan::Scope::Root, - field_type: 
plan::Type::Scalar( - plan_test_helpers::ScalarType::Int, - ), - field_path: None, - }, - }, - }, - plan::Expression::BinaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "title".into(), - field_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::String, - ), - field_path: None, - path: vec![], - }, - operator: plan_test_helpers::ComparisonOperator::Regex, - value: plan::ComparisonValue::Scalar { - value: "Functional.*".into(), - value_type: plan::Type::Scalar( - plan_test_helpers::ScalarType::String, - ), - }, - }, - ], - }), - ..Default::default() - }, - }, - )] - .into(), - arguments: Default::default(), - variables: Default::default(), - variable_types: Default::default(), - }; - - assert_eq!(query_plan, expected); - Ok(()) -} +// #[test] +// fn translates_root_column_references() -> Result<(), anyhow::Error> { +// let query_context = make_flat_schema(); +// let query = query_request() +// .collection("authors") +// .query(query().fields([field!("last_name")]).predicate(exists( +// unrelated!("articles"), +// and([ +// binop("Equal", target!("author_id"), column_value!(root("id"))), +// binop("Regex", target!("title"), value!("Functional.*")), +// ]), +// ))) +// .into(); +// let query_plan = plan_for_query_request(&query_context, query)?; +// +// let expected = QueryPlan { +// collection: "authors".into(), +// query: plan::Query { +// predicate: Some(plan::Expression::Exists { +// in_collection: plan::ExistsInCollection::Unrelated { +// unrelated_collection: "__join_articles_0".into(), +// }, +// predicate: Some(Box::new(plan::Expression::And { +// expressions: vec![ +// plan::Expression::BinaryComparisonOperator { +// column: plan::ComparisonTarget::Column { +// name: "author_id".into(), +// field_path: Default::default(), +// field_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Int), +// path: Default::default(), +// }, +// operator: plan_test_helpers::ComparisonOperator::Equal, +// value: plan::ComparisonValue::Column { +// column: plan::ComparisonTarget::ColumnInScope { +// name: "id".into(), +// field_path: Default::default(), +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::Int, +// ), +// scope: plan::Scope::Root, +// }, +// }, +// }, +// plan::Expression::BinaryComparisonOperator { +// column: plan::ComparisonTarget::Column { +// name: "title".into(), +// field_path: Default::default(), +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::String, +// ), +// path: Default::default(), +// }, +// operator: plan_test_helpers::ComparisonOperator::Regex, +// value: plan::ComparisonValue::Scalar { +// value: json!("Functional.*"), +// value_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::String, +// ), +// }, +// }, +// ], +// })), +// }), +// fields: Some( +// [( +// "last_name".into(), +// plan::Field::Column { +// column: "last_name".into(), +// fields: None, +// column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), +// }, +// )] +// .into(), +// ), +// scope: Some(plan::Scope::Root), +// ..Default::default() +// }, +// unrelated_collections: [( +// "__join_articles_0".into(), +// UnrelatedJoin { +// target_collection: "articles".into(), +// arguments: Default::default(), +// query: plan::Query { +// predicate: Some(plan::Expression::And { +// expressions: vec![ +// plan::Expression::BinaryComparisonOperator { +// column: plan::ComparisonTarget::Column { +// name: "author_id".into(), +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::Int, +// ), +// 
field_path: None, +// path: vec![], +// }, +// operator: plan_test_helpers::ComparisonOperator::Equal, +// value: plan::ComparisonValue::Column { +// column: plan::ComparisonTarget::ColumnInScope { +// name: "id".into(), +// scope: plan::Scope::Root, +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::Int, +// ), +// field_path: None, +// }, +// }, +// }, +// plan::Expression::BinaryComparisonOperator { +// column: plan::ComparisonTarget::Column { +// name: "title".into(), +// field_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::String, +// ), +// field_path: None, +// path: vec![], +// }, +// operator: plan_test_helpers::ComparisonOperator::Regex, +// value: plan::ComparisonValue::Scalar { +// value: "Functional.*".into(), +// value_type: plan::Type::Scalar( +// plan_test_helpers::ScalarType::String, +// ), +// }, +// }, +// ], +// }), +// ..Default::default() +// }, +// }, +// )] +// .into(), +// arguments: Default::default(), +// variables: Default::default(), +// variable_types: Default::default(), +// }; +// +// assert_eq!(query_plan, expected); +// Ok(()) +// } #[test] fn translates_aggregate_selections() -> Result<(), anyhow::Error> { @@ -526,6 +536,7 @@ fn translates_aggregate_selections() -> Result<(), anyhow::Error> { "count_id".into(), plan::Aggregate::ColumnCount { column: "last_name".into(), + arguments: Default::default(), field_path: None, distinct: true, }, @@ -534,9 +545,11 @@ fn translates_aggregate_selections() -> Result<(), anyhow::Error> { "avg_id".into(), plan::Aggregate::SingleColumn { column: "id".into(), + arguments: Default::default(), field_path: None, function: plan_test_helpers::AggregateFunction::Average, - result_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Double), + result_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Double) + .into_nullable(), }, ), ] @@ -576,17 +589,21 @@ fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), a .order_by(vec![ ndc::OrderByElement { order_direction: OrderDirection::Asc, - target: OrderByTarget::SingleColumnAggregate { - column: "year".into(), - function: "Average".into(), - path: vec![path_element("author_articles".into()).into()], - field_path: None, + target: OrderByTarget::Aggregate { + path: vec![path_element("author_articles").into()], + aggregate: ndc::Aggregate::SingleColumn { + column: "year".into(), + arguments: Default::default(), + field_path: None, + function: "Average".into(), + }, }, }, ndc::OrderByElement { order_direction: OrderDirection::Desc, target: OrderByTarget::Column { name: "id".into(), + arguments: Default::default(), field_path: None, path: vec![], }, @@ -595,7 +612,7 @@ fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), a ) .relationships([( "author_articles", - relationship("articles", [("id", "author_id")]), + relationship("articles", [("id", &["author_id"])]), )]) .into(); let query_plan = plan_for_query_request(&query_context, query)?; @@ -608,12 +625,10 @@ fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), a relationship: "author_articles".into(), }, predicate: Some(Box::new(plan::Expression::BinaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "title".into(), - field_path: Default::default(), - field_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), - path: Default::default(), - }, + column: plan::ComparisonTarget::column( + "title", + plan::Type::scalar(plan_test_helpers::ScalarType::String), + ), operator: 
plan_test_helpers::ComparisonOperator::Regex, value: plan::ComparisonValue::Scalar { value: "Functional.*".into(), @@ -625,17 +640,25 @@ fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), a elements: vec![ plan::OrderByElement { order_direction: OrderDirection::Asc, - target: plan::OrderByTarget::SingleColumnAggregate { - column: "year".into(), - function: plan_test_helpers::AggregateFunction::Average, - result_type: plan::Type::Scalar(plan_test_helpers::ScalarType::Double), + target: plan::OrderByTarget::Aggregate { path: vec!["author_articles".into()], + aggregate: plan::Aggregate::SingleColumn { + column: "year".into(), + arguments: Default::default(), + field_path: Default::default(), + function: plan_test_helpers::AggregateFunction::Average, + result_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Double, + ) + .into_nullable(), + }, }, }, plan::OrderByElement { order_direction: OrderDirection::Desc, target: plan::OrderByTarget::Column { name: "id".into(), + arguments: Default::default(), field_path: None, path: vec![], }, @@ -693,7 +716,7 @@ fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), a "author_articles".into(), plan::Relationship { target_collection: "articles".into(), - column_mapping: [("id".into(), "author_id".into())].into(), + column_mapping: [("id".into(), NonEmpty::singleton("author_id".into()))].into(), relationship_type: RelationshipType::Array, arguments: Default::default(), query: plan::Query { @@ -856,15 +879,13 @@ fn translates_predicate_referencing_field_of_related_collection() -> anyhow::Res let query_context = make_nested_schema(); let request = query_request() .collection("appearances") - .relationships([("author", relationship("authors", [("authorId", "id")]))]) + .relationships([("author", relationship("authors", [("authorId", &["id"])]))]) .query( query() .fields([relation_field!("presenter" => "author", query().fields([ field!("name"), ]))]) - .predicate(not(is_null( - target!("name", relations: [path_element("author".into())]), - ))), + .predicate(exists(in_related("author"), not(is_null(target!("name"))))), ) .into(); let query_plan = plan_for_query_request(&query_context, request)?; @@ -872,16 +893,21 @@ fn translates_predicate_referencing_field_of_related_collection() -> anyhow::Res let expected = QueryPlan { collection: "appearances".into(), query: plan::Query { - predicate: Some(plan::Expression::Not { - expression: Box::new(plan::Expression::UnaryComparisonOperator { - column: plan::ComparisonTarget::Column { - name: "name".into(), - field_path: None, - field_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), - path: vec!["author".into()], - }, - operator: ndc_models::UnaryComparisonOperator::IsNull, - }), + predicate: Some(plan::Expression::Exists { + in_collection: plan::ExistsInCollection::Related { + relationship: "author".into(), + }, + predicate: Some(Box::new(plan::Expression::Not { + expression: Box::new(plan::Expression::UnaryComparisonOperator { + column: plan::ComparisonTarget::Column { + name: "name".into(), + arguments: Default::default(), + field_path: None, + field_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), + }, + operator: ndc_models::UnaryComparisonOperator::IsNull, + }), + })), }), fields: Some( [( @@ -909,7 +935,7 @@ fn translates_predicate_referencing_field_of_related_collection() -> anyhow::Res relationships: [( "author".into(), plan::Relationship { - column_mapping: [("authorId".into(), "id".into())].into(), + column_mapping: 
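The predicate rewrite in this test reflects the ndc-spec v0.2 style for conditions on fields of related collections: instead of a column target carrying a relationship path, the condition is wrapped in an `Exists` over the relationship. A rough sketch using the ndc-test-helpers functions touched elsewhere in this diff (`exists`, `in_related`, `not`, `is_null`, `target!`); the relationship and field names are the ones from the surrounding test, and the helpers are assumed to be re-exported at the crate root like the existing ones.

```rust
use ndc_test_helpers::{exists, in_related, is_null, not, target};

// "author" is the relationship key; the predicate is evaluated against rows
// of the related collection rather than against a path-qualified column.
fn presenter_has_name() -> ndc_models::Expression {
    exists(in_related("author"), not(is_null(target!("name"))))
}
```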
[("authorId".into(), NonEmpty::singleton("id".into()))].into(), relationship_type: RelationshipType::Array, target_collection: "authors".into(), arguments: Default::default(), diff --git a/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs b/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs index fa6de979..70140626 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs @@ -44,7 +44,8 @@ fn type_annotated_field_helper( fields, arguments: _, } => { - let column_type = find_object_field(collection_object_type, &column)?; + let column_field = find_object_field(collection_object_type, &column)?; + let column_type = &column_field.r#type; let fields = fields .map(|nested_field| { type_annotated_nested_field_helper( @@ -162,6 +163,10 @@ fn type_annotated_nested_field_helper( )?), }) } + // TODO: ENG-1464 + (ndc::NestedField::Collection(_), _) => Err(QueryPlanError::NotImplemented( + "query.nested_fields.nested_collections".to_string(), + ))?, (nested, Type::Nullable(t)) => { // let path = append_to_path(path, []) type_annotated_nested_field_helper( diff --git a/crates/ndc-query-plan/src/plan_for_query_request/unify_relationship_references.rs b/crates/ndc-query-plan/src/plan_for_query_request/unify_relationship_references.rs index 1d16e70c..0f5c4527 100644 --- a/crates/ndc-query-plan/src/plan_for_query_request/unify_relationship_references.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/unify_relationship_references.rs @@ -341,9 +341,9 @@ mod tests { use crate::{ field, object, plan_for_query_request::plan_test_helpers::{ - date, double, int, object_type, relationship, string, TestContext, + date, double, int, relationship, string, TestContext, }, - Relationship, + Relationship, Type, }; use super::unify_relationship_references; @@ -395,10 +395,10 @@ mod tests { #[test] fn unifies_nested_field_selections() -> anyhow::Result<()> { - let tomatoes_type = object_type([ + let tomatoes_type = Type::object([ ( "viewer", - object_type([("numReviews", int()), ("rating", double())]), + Type::object([("numReviews", int()), ("rating", double())]), ), ("lastUpdated", date()), ]); diff --git a/crates/ndc-query-plan/src/query_plan.rs b/crates/ndc-query-plan/src/query_plan.rs index ef1cb6b4..84f5c2f1 100644 --- a/crates/ndc-query-plan/src/query_plan.rs +++ b/crates/ndc-query-plan/src/query_plan.rs @@ -1,9 +1,12 @@ -use std::{collections::BTreeMap, fmt::Debug, iter}; +use std::{borrow::Cow, collections::BTreeMap, fmt::Debug, iter}; use derivative::Derivative; use indexmap::IndexMap; use itertools::Either; -use ndc_models::{self as ndc, FieldName, OrderDirection, RelationshipType, UnaryComparisonOperator}; +use ndc_models::{ + self as ndc, ArgumentName, FieldName, OrderDirection, RelationshipType, UnaryComparisonOperator, +}; +use nonempty::NonEmpty; use crate::{vec_set::VecSet, Type}; @@ -11,6 +14,11 @@ pub trait ConnectorTypes { type ScalarType: Clone + Debug + PartialEq + Eq; type AggregateFunction: Clone + Debug + PartialEq; type ComparisonOperator: Clone + Debug + PartialEq; + + /// Result type for count aggregations + fn count_aggregate_type() -> Type; + + fn string_type() -> Type; } #[derive(Derivative)] @@ -115,9 +123,16 @@ pub enum Argument { #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub struct Relationship { - pub column_mapping: BTreeMap, + /// A mapping between columns on the source row 
to columns on the target collection. + /// The column on the target collection is specified via a field path (ie. an array of field + /// names that descend through nested object fields). The field path will only contain a single item, + /// meaning a column on the target collection's type, unless the 'relationships.nested' + /// capability is supported, in which case multiple items denotes a nested object field. + pub column_mapping: BTreeMap>, pub relationship_type: RelationshipType, + /// The name of a collection pub target_collection: ndc::CollectionName, + /// Values to be provided to any collection arguments pub arguments: BTreeMap>, pub query: Query, } @@ -168,6 +183,8 @@ pub enum Aggregate { ColumnCount { /// The column to apply the count aggregate function to column: ndc::FieldName, + /// Arguments to satisfy the column specified by 'column' + arguments: BTreeMap>, /// Path to a nested field within an object column field_path: Option>, /// Whether or not only distinct items should be counted @@ -176,6 +193,8 @@ pub enum Aggregate { SingleColumn { /// The column to apply the aggregation function to column: ndc::FieldName, + /// Arguments to satisfy the column specified by 'column' + arguments: BTreeMap>, /// Path to a nested field within an object column field_path: Option>, /// Single column aggregate function name. @@ -185,6 +204,16 @@ pub enum Aggregate { StarCount, } +impl Aggregate { + pub fn result_type(&self) -> Cow> { + match self { + Aggregate::ColumnCount { .. } => Cow::Owned(T::count_aggregate_type()), + Aggregate::SingleColumn { result_type, .. } => Cow::Borrowed(result_type), + Aggregate::StarCount => Cow::Owned(T::count_aggregate_type()), + } + } +} + #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub struct NestedObject { @@ -202,6 +231,7 @@ pub struct NestedArray { pub enum NestedField { Object(NestedObject), Array(NestedArray), + // TODO: ENG-1464 add `Collection(NestedCollection)` variant } #[derive(Derivative)] @@ -249,6 +279,12 @@ pub enum Expression { operator: T::ComparisonOperator, value: ComparisonValue, }, + /// A comparison against a nested array column. + /// Only used if the 'query.nested_fields.filter_by.nested_arrays' capability is supported. + ArrayComparison { + column: ComparisonTarget, + comparison: ArrayComparison, + }, Exists { in_collection: ExistsInCollection, predicate: Option>>, @@ -257,10 +293,14 @@ pub enum Expression { impl Expression { /// Get an iterator of columns referenced by the expression, not including columns of related - /// collections + /// collections. This is used to build a plan for joining the referenced collection - we need + /// to include fields in the join that the expression needs to access. + // + // TODO: ENG-1457 When we implement query.aggregates.filter_by we'll need to collect aggregates + // references. That's why this function returns [ComparisonTarget] instead of [Field]. pub fn query_local_comparison_targets<'a>( &'a self, - ) -> Box> + 'a> { + ) -> Box>> + 'a> { match self { Expression::And { expressions } => Box::new( expressions @@ -274,37 +314,64 @@ impl Expression { ), Expression::Not { expression } => expression.query_local_comparison_targets(), Expression::UnaryComparisonOperator { column, .. } => { - Box::new(Self::local_columns_from_comparison_target(column)) + Box::new(std::iter::once(Cow::Borrowed(column))) } - Expression::BinaryComparisonOperator { column, value, .. 
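The `ConnectorTypes` trait gains `count_aggregate_type` and `string_type`, and the new `Aggregate::result_type` leans on the former so count aggregates no longer need to carry a stored result type. A minimal sketch of what an implementation might look like for a hypothetical connector; `MyConnector` and its enums are made up, and the generic spelling `Type<Self::ScalarType>` is an assumption since the hunk above abbreviates the signatures.

```rust
use ndc_query_plan::{ConnectorTypes, Type};

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum MyScalarType { Int, Double, String }

#[derive(Clone, Copy, Debug, PartialEq)]
enum MyAggregateFunction { Average }

#[derive(Clone, Copy, Debug, PartialEq)]
enum MyComparisonOperator { Equal }

#[derive(Clone, Debug, PartialEq)]
struct MyConnector;

impl ConnectorTypes for MyConnector {
    type ScalarType = MyScalarType;
    type AggregateFunction = MyAggregateFunction;
    type ComparisonOperator = MyComparisonOperator;

    // Used by Aggregate::result_type for ColumnCount and StarCount.
    fn count_aggregate_type() -> Type<Self::ScalarType> {
        Type::scalar(MyScalarType::Int)
    }

    // Used by ComparisonOperatorDefinition::argument_type for the new
    // string comparison operators (Contains, StartsWith, and so on).
    fn string_type() -> Type<Self::ScalarType> {
        Type::scalar(MyScalarType::String)
    }
}
```

With that in place, `Aggregate::<MyConnector>::StarCount.result_type()` would yield the Int type above as a `Cow::Owned` value, per the `result_type` implementation in the hunk above.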
} => { - let value_targets = match value { - ComparisonValue::Column { column } => { - Either::Left(Self::local_columns_from_comparison_target(column)) - } - _ => Either::Right(iter::empty()), + Expression::BinaryComparisonOperator { column, value, .. } => Box::new( + std::iter::once(Cow::Borrowed(column)) + .chain(Self::local_targets_from_comparison_value(value).map(Cow::Owned)), + ), + Expression::ArrayComparison { column, comparison } => { + let value_targets = match comparison { + ArrayComparison::Contains { value } => Either::Left( + Self::local_targets_from_comparison_value(value).map(Cow::Owned), + ), + ArrayComparison::IsEmpty => Either::Right(std::iter::empty()), }; - Box::new(Self::local_columns_from_comparison_target(column).chain(value_targets)) + Box::new(std::iter::once(Cow::Borrowed(column)).chain(value_targets)) } Expression::Exists { .. } => Box::new(iter::empty()), } } - fn local_columns_from_comparison_target( - target: &ComparisonTarget, - ) -> impl Iterator> { - match target { - t @ ComparisonTarget::Column { path, .. } => { + fn local_targets_from_comparison_value( + value: &ComparisonValue, + ) -> impl Iterator> { + match value { + ComparisonValue::Column { + path, + name, + arguments, + field_path, + field_type, + .. + } => { if path.is_empty() { - Either::Left(iter::once(t)) + Either::Left(iter::once(ComparisonTarget::Column { + name: name.clone(), + arguments: arguments.clone(), + field_path: field_path.clone(), + field_type: field_type.clone(), + })) } else { Either::Right(iter::empty()) } } - t @ ComparisonTarget::ColumnInScope { .. } => Either::Left(iter::once(t)), + _ => Either::Right(std::iter::empty()), } } } +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum ArrayComparison { + /// Check if the array contains the specified value. + /// Only used if the 'query.nested_fields.filter_by.nested_arrays.contains' capability is supported. + Contains { value: ComparisonValue }, + /// Check is the array is empty. + /// Only used if the 'query.nested_fields.filter_by.nested_arrays.is_empty' capability is supported. + IsEmpty, +} + #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub struct OrderBy { @@ -323,91 +390,72 @@ pub struct OrderByElement { #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub enum OrderByTarget { Column { - /// The name of the column - name: ndc::FieldName, - - /// Path to a nested field within an object column - field_path: Option>, - /// Any relationships to traverse to reach this column. These are translated from /// [ndc::OrderByElement] values in the [ndc::QueryRequest] to names of relation /// fields for the [QueryPlan]. path: Vec, - }, - SingleColumnAggregate { - /// The column to apply the aggregation function to - column: ndc::FieldName, - /// Single column aggregate function name. - function: T::AggregateFunction, - result_type: Type, + /// The name of the column + name: ndc::FieldName, - /// Any relationships to traverse to reach this aggregate. These are translated from - /// [ndc::OrderByElement] values in the [ndc::QueryRequest] to names of relation - /// fields for the [QueryPlan]. - path: Vec, + /// Arguments to satisfy the column specified by 'name' + arguments: BTreeMap>, + + /// Path to a nested field within an object column + field_path: Option>, }, - StarCountAggregate { - /// Any relationships to traverse to reach this aggregate. 
These are translated from - /// [ndc::OrderByElement] values in the [ndc::QueryRequest] to names of relation - /// fields for the [QueryPlan]. + Aggregate { + /// Non-empty collection of relationships to traverse path: Vec, + /// The aggregation method to use + aggregate: Aggregate, }, } #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub enum ComparisonTarget { + /// The comparison targets a column. Column { /// The name of the column name: ndc::FieldName, - /// Path to a nested field within an object column - field_path: Option>, - - field_type: Type, - - /// Any relationships to traverse to reach this column. These are translated from - /// [ndc::PathElement] values in the [ndc::QueryRequest] to names of relation - /// fields for the [QueryPlan]. - path: Vec, - }, - ColumnInScope { - /// The name of the column - name: ndc::FieldName, - - /// The named scope that identifies the collection to reference. This corresponds to the - /// `scope` field of the [Query] type. - scope: Scope, + /// Arguments to satisfy the column specified by 'name' + arguments: BTreeMap>, /// Path to a nested field within an object column field_path: Option>, + /// Type of the field that you get *after* following `field_path` to a possibly-nested + /// field. field_type: Type, }, + // TODO: ENG-1457 Add this variant to support query.aggregates.filter_by + // /// The comparison targets the result of aggregation. + // /// Only used if the 'query.aggregates.filter_by' capability is supported. + // Aggregate { + // /// Non-empty collection of relationships to traverse + // path: Vec, + // /// The aggregation method to use + // aggregate: Aggregate, + // }, } impl ComparisonTarget { - pub fn column_name(&self) -> &ndc::FieldName { - match self { - ComparisonTarget::Column { name, .. } => name, - ComparisonTarget::ColumnInScope { name, .. } => name, + pub fn column(name: impl Into, field_type: Type) -> Self { + Self::Column { + name: name.into(), + arguments: Default::default(), + field_path: Default::default(), + field_type, } } - pub fn relationship_path(&self) -> &[ndc::RelationshipName] { - match self { - ComparisonTarget::Column { path, .. } => path, - ComparisonTarget::ColumnInScope { .. } => &[], - } - } -} - -impl ComparisonTarget { - pub fn get_field_type(&self) -> &Type { + pub fn target_type(&self) -> &Type { match self { ComparisonTarget::Column { field_type, .. } => field_type, - ComparisonTarget::ColumnInScope { field_type, .. } => field_type, + // TODO: ENG-1457 + // ComparisonTarget::Aggregate { aggregate, .. } => aggregate.result_type, } } } @@ -416,7 +464,28 @@ impl ComparisonTarget { #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub enum ComparisonValue { Column { - column: ComparisonTarget, + /// Any relationships to traverse to reach this column. + /// Only non-empty if the 'relationships.relation_comparisons' capability is supported. + path: Vec, + /// The name of the column + name: ndc::FieldName, + /// Arguments to satisfy the column specified by 'name' + arguments: BTreeMap>, + /// Path to a nested field within an object column. + /// Only non-empty if the 'query.nested_fields.filter_by' capability is supported. + field_path: Option>, + /// Type of the field that you get *after* following `field_path` to a possibly-nested + /// field. + field_type: Type, + /// The scope in which this column exists, identified + /// by a top-down index into the stack of scopes.
+ /// The stack grows inside each `Expression::Exists`, + /// so scope 0 (the default) refers to the current collection, + /// and each subsequent index refers to the collection outside + /// its predecessor's immediately enclosing `Expression::Exists` + /// expression. + /// Only used if the 'query.exists.named_scopes' capability is supported. + scope: Option, }, Scalar { value: serde_json::Value, @@ -428,6 +497,19 @@ pub enum ComparisonValue { }, } +impl ComparisonValue { + pub fn column(name: impl Into, field_type: Type) -> Self { + Self::Column { + path: Default::default(), + name: name.into(), + arguments: Default::default(), + field_path: Default::default(), + field_type, + scope: Default::default(), + } + } +} + #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub struct AggregateFunctionDefinition { @@ -440,29 +522,102 @@ pub struct AggregateFunctionDefinition { pub enum ComparisonOperatorDefinition { Equal, In, + LessThan, + LessThanOrEqual, + GreaterThan, + GreaterThanOrEqual, + Contains, + ContainsInsensitive, + StartsWith, + StartsWithInsensitive, + EndsWith, + EndsWithInsensitive, Custom { /// The type of the argument to this operator argument_type: Type, }, } +impl ComparisonOperatorDefinition { + pub fn argument_type(self, left_operand_type: &Type) -> Type { + use ComparisonOperatorDefinition as C; + match self { + C::In => Type::ArrayOf(Box::new(left_operand_type.clone())), + C::Equal + | C::LessThan + | C::LessThanOrEqual + | C::GreaterThan + | C::GreaterThanOrEqual => left_operand_type.clone(), + C::Contains + | C::ContainsInsensitive + | C::StartsWith + | C::StartsWithInsensitive + | C::EndsWith + | C::EndsWithInsensitive => T::string_type(), + C::Custom { argument_type } => argument_type, + } + } + + pub fn from_ndc_definition( + ndc_definition: &ndc::ComparisonOperatorDefinition, + map_type: impl FnOnce(&ndc::Type) -> Result, E>, + ) -> Result { + use ndc::ComparisonOperatorDefinition as NDC; + let definition = match ndc_definition { + NDC::Equal => Self::Equal, + NDC::In => Self::In, + NDC::LessThan => Self::LessThan, + NDC::LessThanOrEqual => Self::LessThanOrEqual, + NDC::GreaterThan => Self::GreaterThan, + NDC::GreaterThanOrEqual => Self::GreaterThanOrEqual, + NDC::Contains => Self::Contains, + NDC::ContainsInsensitive => Self::ContainsInsensitive, + NDC::StartsWith => Self::StartsWith, + NDC::StartsWithInsensitive => Self::StartsWithInsensitive, + NDC::EndsWith => Self::EndsWith, + NDC::EndsWithInsensitive => Self::EndsWithInsensitive, + NDC::Custom { argument_type } => Self::Custom { + argument_type: map_type(argument_type)?, + }, + }; + Ok(definition) + } +} + #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] pub enum ExistsInCollection { + /// The rows to evaluate the exists predicate against come from a related collection. + /// Only used if the 'relationships' capability is supported. Related { /// Key of the relation in the [Query] joins map. Relationships are scoped to the sub-query /// that defines the relation source. relationship: ndc::RelationshipName, }, + /// The rows to evaluate the exists predicate against come from an unrelated collection + /// Only used if the 'query.exists.unrelated' capability is supported. Unrelated { /// Key of the relation in the [QueryPlan] joins map. Unrelated collections are not scoped /// to a sub-query, instead they are given in the root [QueryPlan]. 
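A small illustration of how the new `argument_type` helper picks the right-hand-side type from the left operand: `In` wraps the operand type in an array, the string operators fall back to `ConnectorTypes::string_type()`, and the ordering/equality operators reuse the operand type itself. The sketch assumes `ComparisonOperatorDefinition` is generic over a `ConnectorTypes` implementation, since the generic parameters are abbreviated in the hunk above.

```rust
use ndc_query_plan::{ComparisonOperatorDefinition, ConnectorTypes, Type};

// For `In`, the comparison argument is a list of candidate values, so the
// expected argument type is an array of the left operand's type
// (equivalent to Type::array_of(left_operand_type.clone())).
fn in_argument_type<T: ConnectorTypes>(
    left_operand_type: &Type<T::ScalarType>,
) -> Type<T::ScalarType> {
    ComparisonOperatorDefinition::<T>::In.argument_type(left_operand_type)
}
```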
unrelated_collection: String, }, + /// The rows to evaluate the exists predicate against come from a nested array field. + /// Only used if the 'query.exists.nested_collections' capability is supported. NestedCollection { column_name: ndc::FieldName, arguments: BTreeMap>, /// Path to a nested collection via object columns field_path: Vec, }, + /// Specifies a column that contains a nested array of scalars. The + /// array will be brought into scope of the nested expression where + /// each element becomes an object with one '__value' column that + /// contains the element value. + /// Only used if the 'query.exists.nested_scalar_collections' capability is supported. + NestedScalarCollection { + column_name: FieldName, + arguments: BTreeMap>, + /// Path to a nested collection via object columns + field_path: Vec, + }, } diff --git a/crates/ndc-query-plan/src/type_system.rs b/crates/ndc-query-plan/src/type_system.rs index 7fea0395..922b52c4 100644 --- a/crates/ndc-query-plan/src/type_system.rs +++ b/crates/ndc-query-plan/src/type_system.rs @@ -2,10 +2,12 @@ use ref_cast::RefCast; use std::collections::BTreeMap; use itertools::Itertools as _; -use ndc_models as ndc; +use ndc_models::{self as ndc, ArgumentName, ObjectTypeName}; use crate::{self as plan, QueryPlanError}; +type Result = std::result::Result; + /// The type of values that a column, field, or argument may take. #[derive(Debug, Clone, PartialEq, Eq)] pub enum Type { @@ -18,6 +20,31 @@ pub enum Type { } impl Type { + pub fn array_of(t: Self) -> Self { + Self::ArrayOf(Box::new(t)) + } + + pub fn named_object( + name: impl Into, + fields: impl IntoIterator, impl Into>)>, + ) -> Self { + Self::Object(ObjectType::new(fields).named(name)) + } + + pub fn nullable(t: Self) -> Self { + t.into_nullable() + } + + pub fn object( + fields: impl IntoIterator, impl Into>)>, + ) -> Self { + Self::Object(ObjectType::new(fields)) + } + + pub fn scalar(scalar_type: impl Into) -> Self { + Self::Scalar(scalar_type.into()) + } + pub fn into_nullable(self) -> Self { match self { t @ Type::Nullable(_) => t, @@ -32,6 +59,32 @@ impl Type { _ => false, } } + + pub fn into_array_element_type(self) -> Result + where + S: Clone + std::fmt::Debug, + { + match self { + Type::ArrayOf(t) => Ok(*t), + Type::Nullable(t) => t.into_array_element_type(), + t => Err(QueryPlanError::TypeMismatch(format!( + "expected an array, but got type {t:?}" + ))), + } + } + + pub fn into_object_type(self) -> Result> + where + S: std::fmt::Debug, + { + match self { + Type::Object(object_type) => Ok(object_type), + Type::Nullable(t) => t.into_object_type(), + t => Err(QueryPlanError::TypeMismatch(format!( + "expected object type, but got {t:?}" + ))), + } + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -39,12 +92,82 @@ pub struct ObjectType { /// A type name may be tracked for error reporting. The name does not affect how query plans /// are generated. 
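A sketch of the new `Type` construction and conversion helpers; the type shapes and the local `Scalar` enum are invented, the field lookup uses the `ObjectType::get` and `ObjectField` additions that follow just below, and the `anyhow` error handling is only illustrative.

```rust
use ndc_query_plan::Type;

#[derive(Clone, Debug, PartialEq, Eq)]
enum Scalar { String }

fn example() -> anyhow::Result<()> {
    let string = || Type::scalar(Scalar::String);

    // A nested object type built with the new constructors; plain Types are
    // promoted to ObjectFields with no parameters via From<Type>.
    let person = Type::named_object(
        "Person",
        [
            ("name", string()),
            ("address", Type::object([("city", string()), ("zip", Type::nullable(string()))])),
            ("tags", Type::array_of(string())),
        ],
    );

    // into_object_type sees through Nullable and errors on non-object types;
    // field lookup now returns an ObjectField, so the type sits under `.r#type`.
    let person_object = person.into_object_type()?;
    let address_field = person_object.get(&"address".into())?;
    let _address_type = &address_field.r#type;
    Ok(())
}
```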
pub name: Option, - pub fields: BTreeMap>, + pub fields: BTreeMap>, } impl ObjectType { + pub fn new( + fields: impl IntoIterator, impl Into>)>, + ) -> Self { + ObjectType { + name: None, + fields: fields + .into_iter() + .map(|(name, field)| (name.into(), field.into())) + .collect(), + } + } + + pub fn named(mut self, name: impl Into) -> Self { + self.name = Some(name.into()); + self + } + pub fn named_fields(&self) -> impl Iterator)> { - self.fields.iter() + self.fields + .iter() + .map(|(name, field)| (name, &field.r#type)) + } + + pub fn get(&self, field_name: &ndc::FieldName) -> Result<&ObjectField> { + self.fields + .get(field_name) + .ok_or_else(|| QueryPlanError::UnknownObjectTypeField { + object_type: None, + field_name: field_name.clone(), + path: Default::default(), + }) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ObjectField { + pub r#type: Type, + /// The arguments available to the field - Matches implementation from CollectionInfo + pub parameters: BTreeMap>, +} + +impl ObjectField { + pub fn new(r#type: Type) -> Self { + Self { + r#type, + parameters: Default::default(), + } + } + + pub fn into_nullable(self) -> Self { + let new_field_type = match self.r#type { + t @ Type::Nullable(_) => t, + t => Type::Nullable(Box::new(t)), + }; + Self { + r#type: new_field_type, + parameters: self.parameters, + } + } + + pub fn with_parameters(mut self, parameters: BTreeMap>) -> Self { + self.parameters = parameters; + self + } +} + +impl From> for ObjectField { + fn from(value: Type) -> Self { + ObjectField { + r#type: value, + parameters: Default::default(), + } } } @@ -56,7 +179,7 @@ pub fn inline_object_types( object_types: &BTreeMap, t: &ndc::Type, lookup_scalar_type: fn(&ndc::ScalarTypeName) -> Option, -) -> Result, QueryPlanError> { +) -> Result> { let plan_type = match t { ndc::Type::Named { name } => lookup_type(object_types, name, lookup_scalar_type)?, @@ -77,7 +200,7 @@ fn lookup_type( object_types: &BTreeMap, name: &ndc::TypeName, lookup_scalar_type: fn(&ndc::ScalarTypeName) -> Option, -) -> Result, QueryPlanError> { +) -> Result> { if let Some(scalar_type) = lookup_scalar_type(ndc::ScalarTypeName::ref_cast(name)) { return Ok(Type::Scalar(scalar_type)); } @@ -93,7 +216,7 @@ fn lookup_object_type_helper( object_types: &BTreeMap, name: &ndc::ObjectTypeName, lookup_scalar_type: fn(&ndc::ScalarTypeName) -> Option, -) -> Result, QueryPlanError> { +) -> Result> { let object_type = object_types .get(name) .ok_or_else(|| QueryPlanError::UnknownObjectType(name.to_string()))?; @@ -104,12 +227,18 @@ fn lookup_object_type_helper( .fields .iter() .map(|(name, field)| { + let field_type = + inline_object_types(object_types, &field.r#type, lookup_scalar_type)?; Ok(( name.to_owned(), - inline_object_types(object_types, &field.r#type, lookup_scalar_type)?, - )) as Result<_, QueryPlanError> + plan::ObjectField { + r#type: field_type, + parameters: Default::default(), // TODO: connect ndc arguments to plan + // parameters + }, + )) }) - .try_collect()?, + .try_collect::<_, _, QueryPlanError>()?, }; Ok(plan_object_type) } @@ -118,6 +247,6 @@ pub fn lookup_object_type( object_types: &BTreeMap, name: &ndc::ObjectTypeName, lookup_scalar_type: fn(&ndc::ScalarTypeName) -> Option, -) -> Result, QueryPlanError> { +) -> Result> { lookup_object_type_helper(object_types, name, lookup_scalar_type) } diff --git a/crates/ndc-test-helpers/src/aggregates.rs b/crates/ndc-test-helpers/src/aggregates.rs index 212222c1..894a823a 100644 --- a/crates/ndc-test-helpers/src/aggregates.rs +++ 
b/crates/ndc-test-helpers/src/aggregates.rs @@ -5,6 +5,7 @@ macro_rules! column_aggregate { $name, $crate::ndc_models::Aggregate::SingleColumn { column: $column.into(), + arguments: Default::default(), function: $function.into(), field_path: None, }, @@ -26,6 +27,7 @@ macro_rules! column_count_aggregate { $name, $crate::ndc_models::Aggregate::ColumnCount { column: $column.into(), + arguments: Default::default(), distinct: $distinct.to_owned(), field_path: None, }, diff --git a/crates/ndc-test-helpers/src/collection_info.rs b/crates/ndc-test-helpers/src/collection_info.rs index 3e042711..040a8694 100644 --- a/crates/ndc-test-helpers/src/collection_info.rs +++ b/crates/ndc-test-helpers/src/collection_info.rs @@ -9,7 +9,6 @@ pub fn collection(name: impl Display + Clone) -> (ndc_models::CollectionName, Co arguments: Default::default(), collection_type: name.to_string().into(), uniqueness_constraints: make_primary_key_uniqueness_constraint(name.clone()), - foreign_keys: Default::default(), }; (name.to_string().into(), coll) } diff --git a/crates/ndc-test-helpers/src/comparison_target.rs b/crates/ndc-test-helpers/src/comparison_target.rs index 41463113..2bad170c 100644 --- a/crates/ndc-test-helpers/src/comparison_target.rs +++ b/crates/ndc-test-helpers/src/comparison_target.rs @@ -3,42 +3,18 @@ macro_rules! target { ($column:literal) => { $crate::ndc_models::ComparisonTarget::Column { name: $column.into(), + arguments: Default::default(), field_path: None, - path: vec![], } }; ($column:literal, field_path:$field_path:expr $(,)?) => { $crate::ndc_models::ComparisonTarget::Column { name: $column.into(), + arguments: Default::default(), field_path: $field_path.into_iter().map(|x| x.into()).collect(), - path: vec![], - } - }; - ($column:literal, relations:$path:expr $(,)?) => { - $crate::ndc_models::ComparisonTarget::Column { - name: $column.into(), - field_path: None, - path: $path.into_iter().map(|x| x.into()).collect(), - } - }; - ($column:literal, field_path:$field_path:expr, relations:$path:expr $(,)?) => { - $crate::ndc_models::ComparisonTarget::Column { - name: $column.into(), - // field_path: $field_path.into_iter().map(|x| x.into()).collect(), - path: $path.into_iter().map(|x| x.into()).collect(), } }; ($target:expr) => { $target }; } - -pub fn root(name: S) -> ndc_models::ComparisonTarget -where - S: ToString, -{ - ndc_models::ComparisonTarget::RootCollectionColumn { - name: name.to_string().into(), - field_path: None, - } -} diff --git a/crates/ndc-test-helpers/src/comparison_value.rs b/crates/ndc-test-helpers/src/comparison_value.rs index 350378e1..cfbeca92 100644 --- a/crates/ndc-test-helpers/src/comparison_value.rs +++ b/crates/ndc-test-helpers/src/comparison_value.rs @@ -1,11 +1,6 @@ -#[macro_export] -macro_rules! column_value { - ($($column:tt)+) => { - $crate::ndc_models::ComparisonValue::Column { - column: $crate::target!($($column)+), - } - }; -} +use std::collections::BTreeMap; + +use ndc_models::{Argument, ArgumentName, ComparisonValue, FieldName, PathElement}; #[macro_export] macro_rules! value { @@ -27,3 +22,65 @@ macro_rules! 
variable { $crate::ndc_models::ComparisonValue::Variable { name: $expr } }; } + +#[derive(Debug)] +pub struct ColumnValueBuilder { + path: Vec, + name: FieldName, + arguments: BTreeMap, + field_path: Option>, + scope: Option, +} + +pub fn column_value(name: impl Into) -> ColumnValueBuilder { + ColumnValueBuilder { + path: Default::default(), + name: name.into(), + arguments: Default::default(), + field_path: Default::default(), + scope: Default::default(), + } +} + +impl ColumnValueBuilder { + pub fn path(mut self, path: impl IntoIterator>) -> Self { + self.path = path.into_iter().map(Into::into).collect(); + self + } + + pub fn arguments( + mut self, + arguments: impl IntoIterator, impl Into)>, + ) -> Self { + self.arguments = arguments + .into_iter() + .map(|(name, arg)| (name.into(), arg.into())) + .collect(); + self + } + + pub fn field_path( + mut self, + field_path: impl IntoIterator>, + ) -> Self { + self.field_path = Some(field_path.into_iter().map(Into::into).collect()); + self + } + + pub fn scope(mut self, scope: usize) -> Self { + self.scope = Some(scope); + self + } +} + +impl From for ComparisonValue { + fn from(builder: ColumnValueBuilder) -> Self { + ComparisonValue::Column { + path: builder.path, + name: builder.name, + arguments: builder.arguments, + field_path: builder.field_path, + scope: builder.scope, + } + } +} diff --git a/crates/ndc-test-helpers/src/exists_in_collection.rs b/crates/ndc-test-helpers/src/exists_in_collection.rs index e13826c6..e7a581c0 100644 --- a/crates/ndc-test-helpers/src/exists_in_collection.rs +++ b/crates/ndc-test-helpers/src/exists_in_collection.rs @@ -1,13 +1,19 @@ +use std::collections::BTreeMap; + +use ndc_models::{Argument, ArgumentName, ExistsInCollection, FieldName}; + #[macro_export] macro_rules! related { ($rel:literal) => { $crate::ndc_models::ExistsInCollection::Related { + field_path: Default::default(), relationship: $rel.into(), arguments: Default::default(), } }; ($rel:literal, $args:expr $(,)?) => { $crate::ndc_models::ExistsInCollection::Related { + field_path: Default::default(), relationship: $rel.into(), arguments: $args.into_iter().map(|x| x.into()).collect(), } @@ -29,3 +35,49 @@ macro_rules! 
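The new `column_value` builder replaces the removed `column_value!` macro and `root()` helper. Combined with the scope field documented on `ComparisonValue::Column` earlier in this diff, a comparison against a column of an enclosing collection (what the commented-out `translates_root_column_references` test calls a root column reference, slated for rework under ENG-1487) might be sketched as below; the operator and column names are illustrative, and `binop` is assumed to accept an already-built `ComparisonValue`, as the existing tests suggest.

```rust
use ndc_test_helpers::{binop, column_value, target};

fn author_id_matches_enclosing_id() -> ndc_models::Expression {
    // scope(0), the default, is the collection of the current Exists
    // predicate; scope(1) refers to the collection one level further out.
    let outer_id: ndc_models::ComparisonValue = column_value("id").scope(1).into();
    binop("Equal", target!("author_id"), outer_id)
}
```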
unrelated { } }; } + +#[derive(Debug)] +pub struct ExistsInNestedCollectionBuilder { + column_name: FieldName, + arguments: BTreeMap, + field_path: Vec, +} + +pub fn exists_in_nested(column_name: impl Into) -> ExistsInNestedCollectionBuilder { + ExistsInNestedCollectionBuilder { + column_name: column_name.into(), + arguments: Default::default(), + field_path: Default::default(), + } +} + +impl ExistsInNestedCollectionBuilder { + pub fn arguments( + mut self, + arguments: impl IntoIterator, impl Into)>, + ) -> Self { + self.arguments = arguments + .into_iter() + .map(|(k, v)| (k.into(), v.into())) + .collect(); + self + } + + pub fn field_path( + mut self, + field_path: impl IntoIterator>, + ) -> Self { + self.field_path = field_path.into_iter().map(Into::into).collect(); + self + } +} + +impl From for ExistsInCollection { + fn from(builder: ExistsInNestedCollectionBuilder) -> Self { + ExistsInCollection::NestedCollection { + column_name: builder.column_name, + arguments: builder.arguments, + field_path: builder.field_path, + } + } +} diff --git a/crates/ndc-test-helpers/src/expressions.rs b/crates/ndc-test-helpers/src/expressions.rs index 6b35ae2a..16aa63fc 100644 --- a/crates/ndc-test-helpers/src/expressions.rs +++ b/crates/ndc-test-helpers/src/expressions.rs @@ -1,5 +1,6 @@ use ndc_models::{ - ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, UnaryComparisonOperator, + ArrayComparison, ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, + RelationshipName, UnaryComparisonOperator, }; pub fn and(operands: I) -> Expression @@ -57,9 +58,39 @@ where } } -pub fn exists(in_collection: ExistsInCollection, predicate: Expression) -> Expression { +pub fn exists( + in_collection: impl Into, + predicate: impl Into, +) -> Expression { Expression::Exists { - in_collection, - predicate: Some(Box::new(predicate)), + in_collection: in_collection.into(), + predicate: Some(Box::new(predicate.into())), + } +} + +pub fn in_related(relationship: impl Into) -> ExistsInCollection { + ExistsInCollection::Related { + field_path: Default::default(), + relationship: relationship.into(), + arguments: Default::default(), + } +} + +pub fn array_contains( + column: impl Into, + value: impl Into, +) -> Expression { + Expression::ArrayComparison { + column: column.into(), + comparison: ArrayComparison::Contains { + value: value.into(), + }, + } +} + +pub fn is_empty(column: impl Into) -> Expression { + Expression::ArrayComparison { + column: column.into(), + comparison: ArrayComparison::IsEmpty, } } diff --git a/crates/ndc-test-helpers/src/lib.rs b/crates/ndc-test-helpers/src/lib.rs index 706cefd6..299c346a 100644 --- a/crates/ndc-test-helpers/src/lib.rs +++ b/crates/ndc-test-helpers/src/lib.rs @@ -47,6 +47,7 @@ pub struct QueryRequestBuilder { arguments: Option>, collection_relationships: Option>, variables: Option>>, + groups: Option, } pub fn query_request() -> QueryRequestBuilder { @@ -61,6 +62,7 @@ impl QueryRequestBuilder { arguments: None, collection_relationships: None, variables: None, + groups: None, } } @@ -116,6 +118,11 @@ impl QueryRequestBuilder { ); self } + + pub fn groups(mut self, groups: impl Into) -> Self { + self.groups = Some(groups.into()); + self + } } impl From for QueryRequest { @@ -142,6 +149,7 @@ pub struct QueryBuilder { offset: Option, order_by: Option, predicate: Option, + groups: Option, } pub fn query() -> QueryBuilder { @@ -157,6 +165,7 @@ impl QueryBuilder { offset: None, order_by: None, predicate: None, + groups: None, } } @@ -210,6 +219,7 @@ impl From 
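The helpers above cover the new ndc-spec v0.2 nested-array capabilities. A rough usage sketch; the column names are invented, and the helper re-exports are assumed to be at the ndc-test-helpers crate root like the existing ones.

```rust
use ndc_test_helpers::{
    array_contains, exists, exists_in_nested, is_empty, is_null, not, target, value,
};

fn example_nested_array_predicates() -> Vec<ndc_models::Expression> {
    vec![
        // 'query.nested_fields.filter_by.nested_arrays.contains'
        array_contains(target!("tags"), value!("rust")),
        // 'query.nested_fields.filter_by.nested_arrays.is_empty'
        is_empty(target!("tags")),
        // 'query.exists.nested_collections': rows come from an array-of-objects
        // column reached through the "metadata" object field.
        exists(
            exists_in_nested("comments").field_path(["metadata"]),
            not(is_null(target!("name"))),
        ),
    ]
}
```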
for Query { offset: value.offset, order_by: value.order_by, predicate: value.predicate, + groups: value.groups, } } } diff --git a/crates/ndc-test-helpers/src/object_type.rs b/crates/ndc-test-helpers/src/object_type.rs index 01feb919..f4978ce5 100644 --- a/crates/ndc-test-helpers/src/object_type.rs +++ b/crates/ndc-test-helpers/src/object_type.rs @@ -20,5 +20,6 @@ pub fn object_type( ) }) .collect(), + foreign_keys: Default::default(), } } diff --git a/crates/ndc-test-helpers/src/order_by.rs b/crates/ndc-test-helpers/src/order_by.rs index 9ea8c778..22e9bce3 100644 --- a/crates/ndc-test-helpers/src/order_by.rs +++ b/crates/ndc-test-helpers/src/order_by.rs @@ -5,6 +5,7 @@ macro_rules! asc { order_direction: $crate::ndc_models::OrderDirection::Asc, target: $crate::ndc_models::OrderByTarget::Column { name: $crate::ndc_models::FieldName::new($crate::smol_str::SmolStr::new($name)), + arguments: Default::default(), field_path: None, path: vec![], }, @@ -19,6 +20,7 @@ macro_rules! desc { order_direction: $crate::ndc_models::OrderDirection::Desc, target: $crate::ndc_models::OrderByTarget::Column { name: $crate::ndc_models::FieldName::new($crate::smol_str::SmolStr::new($name)), + arguments: Default::default(), field_path: None, path: vec![], }, diff --git a/crates/ndc-test-helpers/src/path_element.rs b/crates/ndc-test-helpers/src/path_element.rs index b0c89d5b..25cc4d5d 100644 --- a/crates/ndc-test-helpers/src/path_element.rs +++ b/crates/ndc-test-helpers/src/path_element.rs @@ -1,16 +1,17 @@ use std::collections::BTreeMap; -use ndc_models::{Expression, PathElement, RelationshipArgument}; +use ndc_models::{Expression, FieldName, PathElement, RelationshipArgument}; #[derive(Clone, Debug)] pub struct PathElementBuilder { relationship: ndc_models::RelationshipName, arguments: Option>, + field_path: Option>, predicate: Option>, } -pub fn path_element(relationship: ndc_models::RelationshipName) -> PathElementBuilder { - PathElementBuilder::new(relationship) +pub fn path_element(relationship: impl Into) -> PathElementBuilder { + PathElementBuilder::new(relationship.into()) } impl PathElementBuilder { @@ -18,6 +19,7 @@ impl PathElementBuilder { PathElementBuilder { relationship, arguments: None, + field_path: None, predicate: None, } } @@ -26,6 +28,14 @@ impl PathElementBuilder { self.predicate = Some(Box::new(expression)); self } + + pub fn field_path( + mut self, + field_path: impl IntoIterator>, + ) -> Self { + self.field_path = Some(field_path.into_iter().map(Into::into).collect()); + self + } } impl From for PathElement { @@ -33,6 +43,7 @@ impl From for PathElement { PathElement { relationship: value.relationship, arguments: value.arguments.unwrap_or_default(), + field_path: value.field_path, predicate: value.predicate, } } diff --git a/crates/ndc-test-helpers/src/query_response.rs b/crates/ndc-test-helpers/src/query_response.rs index 72970bb2..3c94378f 100644 --- a/crates/ndc-test-helpers/src/query_response.rs +++ b/crates/ndc-test-helpers/src/query_response.rs @@ -30,6 +30,7 @@ impl QueryResponseBuilder { self.row_sets.push(RowSet { aggregates: None, rows: Some(vec![]), + groups: Default::default(), }); self } @@ -45,6 +46,7 @@ impl From for QueryResponse { pub struct RowSetBuilder { aggregates: IndexMap, rows: Vec>, + groups: Option>, } impl RowSetBuilder { @@ -89,10 +91,24 @@ impl RowSetBuilder { ); self } + + pub fn groups( + mut self, + groups: impl IntoIterator>, + ) -> Self { + self.groups = Some(groups.into_iter().map(Into::into).collect()); + self + } } impl From for RowSet { - fn 
from(RowSetBuilder { aggregates, rows }: RowSetBuilder) -> Self { + fn from( + RowSetBuilder { + aggregates, + rows, + groups, + }: RowSetBuilder, + ) -> Self { RowSet { aggregates: if aggregates.is_empty() { None @@ -100,6 +116,7 @@ impl From for RowSet { Some(aggregates) }, rows: if rows.is_empty() { None } else { Some(rows) }, + groups, } } } diff --git a/crates/ndc-test-helpers/src/relationships.rs b/crates/ndc-test-helpers/src/relationships.rs index 6166e809..053bb7c7 100644 --- a/crates/ndc-test-helpers/src/relationships.rs +++ b/crates/ndc-test-helpers/src/relationships.rs @@ -4,7 +4,7 @@ use ndc_models::{Relationship, RelationshipArgument, RelationshipType}; #[derive(Clone, Debug)] pub struct RelationshipBuilder { - column_mapping: BTreeMap, + column_mapping: BTreeMap>, relationship_type: RelationshipType, target_collection: ndc_models::CollectionName, arguments: BTreeMap, @@ -12,17 +12,22 @@ pub struct RelationshipBuilder { pub fn relationship( target: &str, - column_mapping: [(&str, &str); S], + column_mapping: [(&str, &[&str]); S], ) -> RelationshipBuilder { RelationshipBuilder::new(target, column_mapping) } impl RelationshipBuilder { - pub fn new(target: &str, column_mapping: [(&str, &str); S]) -> Self { + pub fn new(target: &str, column_mapping: [(&str, &[&str]); S]) -> Self { RelationshipBuilder { column_mapping: column_mapping .into_iter() - .map(|(source, target)| (source.to_owned().into(), target.to_owned().into())) + .map(|(source, target)| { + ( + source.to_owned().into(), + target.iter().map(|s| s.to_owned().into()).collect(), + ) + }) .collect(), relationship_type: RelationshipType::Array, target_collection: target.to_owned().into(), diff --git a/crates/test-helpers/src/arb_plan_type.rs b/crates/test-helpers/src/arb_plan_type.rs index 0ffe5ac1..4dfdff84 100644 --- a/crates/test-helpers/src/arb_plan_type.rs +++ b/crates/test-helpers/src/arb_plan_type.rs @@ -1,5 +1,5 @@ use configuration::MongoScalarType; -use ndc_query_plan::{ObjectType, Type}; +use ndc_query_plan::{ObjectField, ObjectType, Type}; use proptest::{collection::btree_map, prelude::*}; use crate::arb_type::arb_bson_scalar_type; @@ -14,9 +14,18 @@ pub fn arb_plan_type() -> impl Strategy> { any::>(), btree_map(any::().prop_map_into(), inner, 1..=10) ) - .prop_map(|(name, fields)| Type::Object(ObjectType { + .prop_map(|(name, field_types)| Type::Object(ObjectType { name: name.map(|n| n.into()), - fields + fields: field_types + .into_iter() + .map(|(name, t)| ( + name, + ObjectField { + r#type: t, + parameters: Default::default() + } + )) + .collect(), })) ] }) diff --git a/fixtures/hasura/app/connector/test_cases/schema/departments.json b/fixtures/hasura/app/connector/test_cases/schema/departments.json new file mode 100644 index 00000000..5f8996b4 --- /dev/null +++ b/fixtures/hasura/app/connector/test_cases/schema/departments.json @@ -0,0 +1,24 @@ +{ + "name": "departments", + "collections": { + "departments": { + "type": "departments" + } + }, + "objectTypes": { + "departments": { + "fields": { + "_id": { + "type": { + "scalar": "objectId" + } + }, + "description": { + "type": { + "scalar": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/hasura/app/connector/test_cases/schema/schools.json b/fixtures/hasura/app/connector/test_cases/schema/schools.json new file mode 100644 index 00000000..0ebed63e --- /dev/null +++ b/fixtures/hasura/app/connector/test_cases/schema/schools.json @@ -0,0 +1,43 @@ +{ + "name": "schools", + "collections": { + "schools": { + "type": "schools" + } + }, 
+ "objectTypes": { + "schools": { + "fields": { + "_id": { + "type": { + "scalar": "objectId" + } + }, + "departments": { + "type": { + "object": "schools_departments" + } + }, + "name": { + "type": { + "scalar": "string" + } + } + } + }, + "schools_departments": { + "fields": { + "english_department_id": { + "type": { + "scalar": "objectId" + } + }, + "math_department_id": { + "type": { + "scalar": "objectId" + } + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/hasura/app/metadata/Album.hml b/fixtures/hasura/app/metadata/Album.hml index eb4505fe..d18208be 100644 --- a/fixtures/hasura/app/metadata/Album.hml +++ b/fixtures/hasura/app/metadata/Album.hml @@ -5,7 +5,7 @@ definition: name: Album fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: albumId type: Int! - name: artistId @@ -56,7 +56,7 @@ definition: type: Album comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: albumId booleanExpressionType: IntBoolExp - fieldName: artistId @@ -83,7 +83,7 @@ definition: aggregatedType: Album aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: albumId aggregateExpression: IntAggExp - fieldName: artistId diff --git a/fixtures/hasura/app/metadata/Artist.hml b/fixtures/hasura/app/metadata/Artist.hml index 38755178..2ba6e1ac 100644 --- a/fixtures/hasura/app/metadata/Artist.hml +++ b/fixtures/hasura/app/metadata/Artist.hml @@ -5,7 +5,7 @@ definition: name: Artist fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: artistId type: Int! - name: name @@ -50,7 +50,7 @@ definition: type: Artist comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: artistId booleanExpressionType: IntBoolExp - fieldName: name @@ -74,7 +74,7 @@ definition: aggregatedType: Artist aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: artistId aggregateExpression: IntAggExp - fieldName: name diff --git a/fixtures/hasura/app/metadata/ArtistsWithAlbumsAndTracks.hml b/fixtures/hasura/app/metadata/ArtistsWithAlbumsAndTracks.hml index 9d6f0cd2..11217659 100644 --- a/fixtures/hasura/app/metadata/ArtistsWithAlbumsAndTracks.hml +++ b/fixtures/hasura/app/metadata/ArtistsWithAlbumsAndTracks.hml @@ -5,7 +5,7 @@ definition: name: AlbumWithTracks fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: title type: String! - name: tracks @@ -47,7 +47,7 @@ definition: name: ArtistWithAlbumsAndTracks fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: albums type: "[AlbumWithTracks!]!" 
- name: name @@ -92,7 +92,7 @@ definition: type: AlbumWithTracks comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: title booleanExpressionType: StringBoolExp comparableRelationships: [] @@ -113,7 +113,7 @@ definition: type: ArtistWithAlbumsAndTracks comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: albums booleanExpressionType: AlbumWithTracksBoolExp - fieldName: name @@ -136,7 +136,7 @@ definition: aggregatedType: ArtistWithAlbumsAndTracks aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: name aggregateExpression: StringAggExp count: diff --git a/fixtures/hasura/app/metadata/Customer.hml b/fixtures/hasura/app/metadata/Customer.hml index 61dfddc6..b853b340 100644 --- a/fixtures/hasura/app/metadata/Customer.hml +++ b/fixtures/hasura/app/metadata/Customer.hml @@ -5,7 +5,7 @@ definition: name: Customer fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: address type: String! - name: city @@ -116,7 +116,7 @@ definition: type: Customer comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: address booleanExpressionType: StringBoolExp - fieldName: city @@ -163,7 +163,7 @@ definition: aggregatedType: Customer aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: address aggregateExpression: StringAggExp - fieldName: city diff --git a/fixtures/hasura/app/metadata/Departments.hml b/fixtures/hasura/app/metadata/Departments.hml new file mode 100644 index 00000000..92fa76ce --- /dev/null +++ b/fixtures/hasura/app/metadata/Departments.hml @@ -0,0 +1,122 @@ +--- +kind: ObjectType +version: v1 +definition: + name: Departments + fields: + - name: id + type: ObjectId! + - name: description + type: String! 
+ graphql: + typeName: Departments + inputTypeName: DepartmentsInput + dataConnectorTypeMapping: + - dataConnectorName: test_cases + dataConnectorObjectType: departments + fieldMapping: + id: + column: + name: _id + description: + column: + name: description + +--- +kind: TypePermissions +version: v1 +definition: + typeName: Departments + permissions: + - role: admin + output: + allowedFields: + - id + - description + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: DepartmentsBoolExp + operand: + object: + type: Departments + comparableFields: + - fieldName: id + booleanExpressionType: ObjectIdBoolExp + - fieldName: description + booleanExpressionType: StringBoolExp + comparableRelationships: [] + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: DepartmentsBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: DepartmentsAggExp + operand: + object: + aggregatedType: Departments + aggregatableFields: + - fieldName: id + aggregateExpression: ObjectIdAggExp + - fieldName: description + aggregateExpression: StringAggExp + count: + enable: true + graphql: + selectTypeName: DepartmentsAggExp + +--- +kind: Model +version: v1 +definition: + name: Departments + objectType: Departments + source: + dataConnectorName: test_cases + collection: departments + filterExpressionType: DepartmentsBoolExp + aggregateExpression: DepartmentsAggExp + orderableFields: + - fieldName: id + orderByDirections: + enableAll: true + - fieldName: description + orderByDirections: + enableAll: true + graphql: + selectMany: + queryRootField: departments + subscription: + rootField: departments + selectUniques: + - queryRootField: departmentsById + uniqueIdentifier: + - id + subscription: + rootField: departmentsById + orderByExpressionType: DepartmentsOrderBy + filterInputTypeName: DepartmentsFilterInput + aggregate: + queryRootField: departmentsAggregate + subscription: + rootField: departmentsAggregate + +--- +kind: ModelPermissions +version: v1 +definition: + modelName: Departments + permissions: + - role: admin + select: + filter: null + allowSubscriptions: true + diff --git a/fixtures/hasura/app/metadata/Employee.hml b/fixtures/hasura/app/metadata/Employee.hml index 5f926da4..151b55c0 100644 --- a/fixtures/hasura/app/metadata/Employee.hml +++ b/fixtures/hasura/app/metadata/Employee.hml @@ -5,7 +5,7 @@ definition: name: Employee fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: address type: String! - name: birthDate @@ -128,7 +128,7 @@ definition: type: Employee comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: address booleanExpressionType: StringBoolExp - fieldName: birthDate @@ -180,7 +180,7 @@ definition: aggregatedType: Employee aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: address aggregateExpression: StringAggExp - fieldName: birthDate diff --git a/fixtures/hasura/app/metadata/Genre.hml b/fixtures/hasura/app/metadata/Genre.hml index 6f718cdb..a64a1ad1 100644 --- a/fixtures/hasura/app/metadata/Genre.hml +++ b/fixtures/hasura/app/metadata/Genre.hml @@ -5,7 +5,7 @@ definition: name: Genre fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: genreId type: Int! 
- name: name @@ -50,7 +50,7 @@ definition: type: Genre comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: genreId booleanExpressionType: IntBoolExp - fieldName: name @@ -74,7 +74,7 @@ definition: aggregatedType: Genre aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: genreId aggregateExpression: IntAggExp - fieldName: name diff --git a/fixtures/hasura/app/metadata/Invoice.hml b/fixtures/hasura/app/metadata/Invoice.hml index 611f4faf..9d12ec8f 100644 --- a/fixtures/hasura/app/metadata/Invoice.hml +++ b/fixtures/hasura/app/metadata/Invoice.hml @@ -5,7 +5,7 @@ definition: name: Invoice fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: billingAddress type: String! - name: billingCity @@ -92,7 +92,7 @@ definition: type: Invoice comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: billingAddress booleanExpressionType: StringBoolExp - fieldName: billingCity @@ -131,7 +131,7 @@ definition: aggregatedType: Invoice aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: billingAddress aggregateExpression: StringAggExp - fieldName: billingCity diff --git a/fixtures/hasura/app/metadata/InvoiceLine.hml b/fixtures/hasura/app/metadata/InvoiceLine.hml index a6a79cdb..9456c12b 100644 --- a/fixtures/hasura/app/metadata/InvoiceLine.hml +++ b/fixtures/hasura/app/metadata/InvoiceLine.hml @@ -5,7 +5,7 @@ definition: name: InvoiceLine fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: invoiceId type: Int! - name: invoiceLineId @@ -68,7 +68,7 @@ definition: type: InvoiceLine comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: invoiceId booleanExpressionType: IntBoolExp - fieldName: invoiceLineId @@ -99,7 +99,7 @@ definition: aggregatedType: InvoiceLine aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: invoiceId aggregateExpression: IntAggExp - fieldName: invoiceLineId diff --git a/fixtures/hasura/app/metadata/MediaType.hml b/fixtures/hasura/app/metadata/MediaType.hml index fc2ab999..7c2f3c4e 100644 --- a/fixtures/hasura/app/metadata/MediaType.hml +++ b/fixtures/hasura/app/metadata/MediaType.hml @@ -5,7 +5,7 @@ definition: name: MediaType fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: mediaTypeId type: Int! - name: name @@ -50,7 +50,7 @@ definition: type: MediaType comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: mediaTypeId booleanExpressionType: IntBoolExp - fieldName: name @@ -74,7 +74,7 @@ definition: aggregatedType: MediaType aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: mediaTypeId aggregateExpression: IntAggExp - fieldName: name diff --git a/fixtures/hasura/app/metadata/NestedCollection.hml b/fixtures/hasura/app/metadata/NestedCollection.hml index 4923afb9..880803e3 100644 --- a/fixtures/hasura/app/metadata/NestedCollection.hml +++ b/fixtures/hasura/app/metadata/NestedCollection.hml @@ -31,7 +31,7 @@ definition: name: NestedCollection fields: - name: id - type: ObjectId_2! + type: ObjectId! - name: institution type: String! 
- name: staff @@ -95,7 +95,7 @@ definition: type: NestedCollection comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_2 + booleanExpressionType: ObjectIdBoolExp - fieldName: institution booleanExpressionType: StringBoolExp - fieldName: staff @@ -118,7 +118,7 @@ definition: aggregatedType: NestedCollection aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_2 + aggregateExpression: ObjectIdAggExp - fieldName: institution aggregateExpression: StringAggExp count: diff --git a/fixtures/hasura/app/metadata/NestedFieldWithDollar.hml b/fixtures/hasura/app/metadata/NestedFieldWithDollar.hml index b1ca6f75..b02d7b9e 100644 --- a/fixtures/hasura/app/metadata/NestedFieldWithDollar.hml +++ b/fixtures/hasura/app/metadata/NestedFieldWithDollar.hml @@ -35,7 +35,7 @@ definition: name: NestedFieldWithDollar fields: - name: id - type: ObjectId_2! + type: ObjectId! - name: configuration type: NestedFieldWithDollarConfiguration! graphql: @@ -93,7 +93,7 @@ definition: type: NestedFieldWithDollar comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_2 + booleanExpressionType: ObjectIdBoolExp - fieldName: configuration booleanExpressionType: NestedFieldWithDollarConfigurationBoolExp comparableRelationships: [] @@ -114,7 +114,7 @@ definition: aggregatedType: NestedFieldWithDollar aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_2 + aggregateExpression: ObjectIdAggExp count: enable: true graphql: diff --git a/fixtures/hasura/app/metadata/Playlist.hml b/fixtures/hasura/app/metadata/Playlist.hml index 3fcf6bea..dd966838 100644 --- a/fixtures/hasura/app/metadata/Playlist.hml +++ b/fixtures/hasura/app/metadata/Playlist.hml @@ -5,7 +5,7 @@ definition: name: Playlist fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: name type: String! - name: playlistId @@ -50,7 +50,7 @@ definition: type: Playlist comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: name booleanExpressionType: StringBoolExp - fieldName: playlistId @@ -74,7 +74,7 @@ definition: aggregatedType: Playlist aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: name aggregateExpression: StringAggExp - fieldName: playlistId diff --git a/fixtures/hasura/app/metadata/PlaylistTrack.hml b/fixtures/hasura/app/metadata/PlaylistTrack.hml index 02c4d289..973388d8 100644 --- a/fixtures/hasura/app/metadata/PlaylistTrack.hml +++ b/fixtures/hasura/app/metadata/PlaylistTrack.hml @@ -5,7 +5,7 @@ definition: name: PlaylistTrack fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: playlistId type: Int! 
- name: trackId @@ -50,7 +50,7 @@ definition: type: PlaylistTrack comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: playlistId booleanExpressionType: IntBoolExp - fieldName: trackId @@ -75,7 +75,7 @@ definition: aggregatedType: PlaylistTrack aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: playlistId aggregateExpression: IntAggExp - fieldName: trackId diff --git a/fixtures/hasura/app/metadata/Schools.hml b/fixtures/hasura/app/metadata/Schools.hml new file mode 100644 index 00000000..8f5e624a --- /dev/null +++ b/fixtures/hasura/app/metadata/Schools.hml @@ -0,0 +1,210 @@ +--- +kind: ObjectType +version: v1 +definition: + name: SchoolsDepartments + fields: + - name: englishDepartmentId + type: ObjectId! + - name: mathDepartmentId + type: ObjectId! + graphql: + typeName: SchoolsDepartments + inputTypeName: SchoolsDepartmentsInput + dataConnectorTypeMapping: + - dataConnectorName: test_cases + dataConnectorObjectType: schools_departments + fieldMapping: + englishDepartmentId: + column: + name: english_department_id + mathDepartmentId: + column: + name: math_department_id + +--- +kind: TypePermissions +version: v1 +definition: + typeName: SchoolsDepartments + permissions: + - role: admin + output: + allowedFields: + - englishDepartmentId + - mathDepartmentId + +--- +kind: ObjectType +version: v1 +definition: + name: Schools + fields: + - name: id + type: ObjectId! + - name: departments + type: SchoolsDepartments! + - name: name + type: String! + graphql: + typeName: Schools + inputTypeName: SchoolsInput + dataConnectorTypeMapping: + - dataConnectorName: test_cases + dataConnectorObjectType: schools + fieldMapping: + id: + column: + name: _id + departments: + column: + name: departments + name: + column: + name: name + +--- +kind: TypePermissions +version: v1 +definition: + typeName: Schools + permissions: + - role: admin + output: + allowedFields: + - id + - departments + - name + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: SchoolsDepartmentsBoolExp + operand: + object: + type: SchoolsDepartments + comparableFields: + - fieldName: englishDepartmentId + booleanExpressionType: ObjectIdBoolExp + - fieldName: mathDepartmentId + booleanExpressionType: ObjectIdBoolExp + comparableRelationships: [] + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: SchoolsDepartmentsBoolExp + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: SchoolsBoolExp + operand: + object: + type: Schools + comparableFields: + - fieldName: id + booleanExpressionType: ObjectIdBoolExp + - fieldName: departments + booleanExpressionType: SchoolsDepartmentsBoolExp + - fieldName: name + booleanExpressionType: StringBoolExp + comparableRelationships: [] + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: SchoolsBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: SchoolsDepartmentsAggExp + operand: + object: + aggregatedType: SchoolsDepartments + aggregatableFields: + - fieldName: englishDepartmentId + aggregateExpression: ObjectIdAggExp + - fieldName: mathDepartmentId + aggregateExpression: ObjectIdAggExp + count: + enable: true + graphql: + selectTypeName: SchoolsDepartmentsAggExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: SchoolsAggExp + operand: + object: + aggregatedType: Schools + aggregatableFields: + - fieldName: id + 
aggregateExpression: ObjectIdAggExp + - fieldName: departments + aggregateExpression: SchoolsDepartmentsAggExp + - fieldName: name + aggregateExpression: StringAggExp + count: + enable: true + graphql: + selectTypeName: SchoolsAggExp + +--- +kind: Model +version: v1 +definition: + name: Schools + objectType: Schools + source: + dataConnectorName: test_cases + collection: schools + filterExpressionType: SchoolsBoolExp + aggregateExpression: SchoolsAggExp + orderableFields: + - fieldName: id + orderByDirections: + enableAll: true + - fieldName: departments + orderByDirections: + enableAll: true + - fieldName: name + orderByDirections: + enableAll: true + graphql: + selectMany: + queryRootField: schools + subscription: + rootField: schools + selectUniques: + - queryRootField: schoolsById + uniqueIdentifier: + - id + subscription: + rootField: schoolsById + orderByExpressionType: SchoolsOrderBy + filterInputTypeName: SchoolsFilterInput + aggregate: + queryRootField: schoolsAggregate + subscription: + rootField: schoolsAggregate + +--- +kind: ModelPermissions +version: v1 +definition: + modelName: Schools + permissions: + - role: admin + select: + filter: null + allowSubscriptions: true + diff --git a/fixtures/hasura/app/metadata/Track.hml b/fixtures/hasura/app/metadata/Track.hml index b29ed569..f3a84064 100644 --- a/fixtures/hasura/app/metadata/Track.hml +++ b/fixtures/hasura/app/metadata/Track.hml @@ -5,7 +5,7 @@ definition: name: Track fields: - name: id - type: ObjectId_1! + type: ObjectId! - name: albumId type: Int! - name: bytes @@ -92,7 +92,7 @@ definition: type: Track comparableFields: - fieldName: id - booleanExpressionType: ObjectIdBoolExp_1 + booleanExpressionType: ObjectIdBoolExp - fieldName: albumId booleanExpressionType: IntBoolExp - fieldName: bytes @@ -134,7 +134,7 @@ definition: aggregatedType: Track aggregatableFields: - fieldName: id - aggregateExpression: ObjectIdAggExp_1 + aggregateExpression: ObjectIdAggExp - fieldName: albumId aggregateExpression: IntAggExp - fieldName: bytes diff --git a/fixtures/hasura/app/metadata/WeirdFieldNames.hml b/fixtures/hasura/app/metadata/WeirdFieldNames.hml index 03d33ac1..784959b7 100644 --- a/fixtures/hasura/app/metadata/WeirdFieldNames.hml +++ b/fixtures/hasura/app/metadata/WeirdFieldNames.hml @@ -101,7 +101,7 @@ definition: - name: invalidObjectName type: WeirdFieldNamesInvalidObjectName! - name: id - type: ObjectId_2! + type: ObjectId! - name: validObjectName type: WeirdFieldNamesValidObjectName! 
graphql: @@ -215,7 +215,7 @@ definition: - fieldName: invalidObjectName booleanExpressionType: WeirdFieldNamesInvalidObjectNameBoolExp - fieldName: id - booleanExpressionType: ObjectIdBoolExp_2 + booleanExpressionType: ObjectIdBoolExp - fieldName: validObjectName booleanExpressionType: WeirdFieldNamesValidObjectNameBoolExp comparableRelationships: [] @@ -238,7 +238,7 @@ definition: - fieldName: invalidName aggregateExpression: IntAggExp - fieldName: id - aggregateExpression: ObjectIdAggExp_2 + aggregateExpression: ObjectIdAggExp count: enable: true graphql: diff --git a/fixtures/hasura/app/metadata/chinook.hml b/fixtures/hasura/app/metadata/chinook.hml index a23c4937..1175ffaf 100644 --- a/fixtures/hasura/app/metadata/chinook.hml +++ b/fixtures/hasura/app/metadata/chinook.hml @@ -9,12 +9,36 @@ definition: write: valueFromEnv: APP_CHINOOK_WRITE_URL schema: - version: v0.1 + version: v0.2 + capabilities: + version: 0.2.0 + capabilities: + query: + aggregates: {} + variables: {} + explain: {} + nested_fields: + filter_by: + nested_arrays: + contains: {} + is_empty: {} + order_by: {} + aggregates: {} + nested_collections: {} + exists: + unrelated: {} + nested_collections: {} + mutation: {} + relationships: + relation_comparisons: {} schema: scalar_types: BinData: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -40,6 +64,7 @@ definition: type: boolean aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -65,46 +90,27 @@ definition: type: timestamp aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Date + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Date + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Date + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Date + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Date + type: less_than _lte: - type: custom - argument_type: - type: named - name: Date + type: less_than_or_equal _neq: type: custom argument_type: @@ -118,8 +124,11 @@ definition: type: named name: Date DbPointer: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -145,58 +154,33 @@ definition: type: bigdecimal aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: sum + result_type: Double comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Decimal + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Decimal + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Decimal + type: less_than _lte: - type: custom - argument_type: - type: named - name: Decimal + type: less_than_or_equal _neq: type: custom argument_type: @@ -214,58 +198,33 @@ definition: type: float64 aggregate_functions: avg: - result_type: 
- type: nullable - underlying_type: - type: named - name: Double + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: sum + result_type: Double comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Double + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Double + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Double + type: less_than _lte: - type: custom - argument_type: - type: named - name: Double + type: less_than_or_equal _neq: type: custom argument_type: @@ -283,22 +242,21 @@ definition: type: json aggregate_functions: avg: + type: custom result_type: type: named name: ExtendedJSON count: + type: custom result_type: type: named name: Int max: - result_type: - type: named - name: ExtendedJSON + type: max min: - result_type: - type: named - name: ExtendedJSON + type: min sum: + type: custom result_type: type: named name: ExtendedJSON @@ -306,35 +264,20 @@ definition: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: greater_than _gte: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: greater_than_or_equal _in: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: in _iregex: type: custom argument_type: type: named - name: String + name: Regex _lt: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: less_than _lte: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: less_than_or_equal _neq: type: custom argument_type: @@ -343,70 +286,47 @@ definition: _nin: type: custom argument_type: - type: named - name: ExtendedJSON + type: array + element_type: + type: named + name: ExtendedJSON _regex: type: custom argument_type: type: named - name: String + name: Regex Int: representation: type: int32 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: sum + result_type: Long comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Int + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Int + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Int + type: less_than _lte: - type: custom - argument_type: - type: named - name: Int + type: less_than_or_equal _neq: type: custom argument_type: @@ -420,15 +340,21 @@ definition: type: named name: Int Javascript: + representation: + type: string aggregate_functions: count: + type: custom result_type: type: named name: Int comparison_operators: {} JavascriptWithScope: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -438,58 +364,33 @@ definition: type: int64 aggregate_functions: 
avg: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: sum + result_type: Long comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Long + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Long + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Long + type: less_than _lte: - type: custom - argument_type: - type: named - name: Long + type: less_than_or_equal _neq: type: custom argument_type: @@ -503,8 +404,11 @@ definition: type: named name: Long MaxKey: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -526,8 +430,11 @@ definition: type: named name: MaxKey MinKey: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -548,9 +455,12 @@ definition: element_type: type: named name: MinKey - "Null": + 'Null': + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -563,19 +473,20 @@ definition: type: custom argument_type: type: named - name: "Null" + name: 'Null' _nin: type: custom argument_type: type: array element_type: type: named - name: "Null" + name: 'Null' ObjectId: representation: type: string aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -597,8 +508,11 @@ definition: type: named name: ObjectId Regex: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -608,51 +522,32 @@ definition: type: string aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: String + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: String + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: String + type: greater_than _gte: - type: custom - argument_type: - type: named - name: String + type: greater_than_or_equal _in: type: in _iregex: type: custom argument_type: type: named - name: String + name: Regex _lt: - type: custom - argument_type: - type: named - name: String + type: less_than _lte: - type: custom - argument_type: - type: named - name: String + type: less_than_or_equal _neq: type: custom argument_type: @@ -669,10 +564,13 @@ definition: type: custom argument_type: type: named - name: String + name: Regex Symbol: + representation: + type: string aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -694,48 +592,31 @@ definition: type: named name: Symbol Timestamp: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Timestamp + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Timestamp + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Timestamp + type: greater_than _gte: 
- type: custom - argument_type: - type: named - name: Timestamp + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Timestamp + type: less_than _lte: - type: custom - argument_type: - type: named - name: Timestamp + type: less_than_or_equal _neq: type: custom argument_type: @@ -749,8 +630,11 @@ definition: type: named name: Timestamp Undefined: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -774,10 +658,6 @@ definition: object_types: Album: fields: - _id: - type: - type: named - name: ObjectId AlbumId: type: type: named @@ -790,12 +670,13 @@ definition: type: type: named name: String - AlbumWithTracks: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + AlbumWithTracks: + fields: Title: type: type: named @@ -806,12 +687,13 @@ definition: element_type: type: named name: Track - Artist: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + Artist: + fields: ArtistId: type: type: named @@ -820,12 +702,13 @@ definition: type: type: named name: String - ArtistWithAlbumsAndTracks: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + ArtistWithAlbumsAndTracks: + fields: Albums: type: type: array @@ -836,12 +719,13 @@ definition: type: type: named name: String - Customer: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + Customer: + fields: Address: type: type: named @@ -904,12 +788,13 @@ definition: type: type: named name: Int - Employee: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + Employee: + fields: Address: type: type: named @@ -972,12 +857,13 @@ definition: type: type: named name: String - Genre: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + Genre: + fields: GenreId: type: type: named @@ -986,9 +872,14 @@ definition: type: type: named name: String + _id: + type: + type: named + name: ObjectId + foreign_keys: {} InsertArtist: fields: - "n": + n: type: type: named name: Int @@ -996,12 +887,9 @@ definition: type: type: named name: Double + foreign_keys: {} Invoice: fields: - _id: - type: - type: named - name: ObjectId BillingAddress: type: type: named @@ -1042,12 +930,13 @@ definition: type: type: named name: Decimal - InvoiceLine: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + InvoiceLine: + fields: InvoiceId: type: type: named @@ -1068,12 +957,13 @@ definition: type: type: named name: Decimal - MediaType: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + MediaType: + fields: MediaTypeId: type: type: named @@ -1082,12 +972,13 @@ definition: type: type: named name: String - Playlist: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + Playlist: + fields: Name: type: type: named @@ -1096,12 +987,13 @@ definition: type: type: named name: Int - PlaylistTrack: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + PlaylistTrack: + fields: PlaylistId: type: type: named @@ -1110,12 +1002,13 @@ definition: type: type: named name: Int - Track: - fields: _id: type: type: named name: ObjectId + foreign_keys: {} + Track: + fields: AlbumId: type: type: named @@ -1154,147 +1047,128 @@ definition: type: type: named name: Decimal - collections: - - name: Album - arguments: {} - type: Album - uniqueness_constraints: - Album_id: - unique_columns: - - _id - foreign_keys: {} - - name: Artist - arguments: {} - type: Artist - uniqueness_constraints: - Artist_id: - unique_columns: - - _id - foreign_keys: {} - - name: 
Customer - arguments: {} - type: Customer - uniqueness_constraints: - Customer_id: - unique_columns: - - _id - foreign_keys: {} - - name: Employee - arguments: {} - type: Employee - uniqueness_constraints: - Employee_id: - unique_columns: - - _id - foreign_keys: {} - - name: Genre - arguments: {} - type: Genre - uniqueness_constraints: - Genre_id: - unique_columns: - - _id - foreign_keys: {} - - name: Invoice - arguments: {} - type: Invoice - uniqueness_constraints: - Invoice_id: - unique_columns: - - _id - foreign_keys: {} - - name: InvoiceLine - arguments: {} - type: InvoiceLine - uniqueness_constraints: - InvoiceLine_id: - unique_columns: - - _id - foreign_keys: {} - - name: MediaType - arguments: {} - type: MediaType - uniqueness_constraints: - MediaType_id: - unique_columns: - - _id - foreign_keys: {} - - name: Playlist - arguments: {} - type: Playlist - uniqueness_constraints: - Playlist_id: - unique_columns: - - _id - foreign_keys: {} - - name: PlaylistTrack - arguments: {} - type: PlaylistTrack - uniqueness_constraints: - PlaylistTrack_id: - unique_columns: - - _id - foreign_keys: {} - - name: Track - arguments: {} - type: Track - uniqueness_constraints: - Track_id: - unique_columns: - - _id - foreign_keys: {} - - name: artists_with_albums_and_tracks - description: combines artist, albums, and tracks into a single document per artist - arguments: {} - type: ArtistWithAlbumsAndTracks - uniqueness_constraints: - artists_with_albums_and_tracks_id: - unique_columns: - - _id + _id: + type: + type: named + name: ObjectId foreign_keys: {} + collections: + - name: Album + arguments: {} + type: Album + uniqueness_constraints: + Album_id: + unique_columns: + - _id + - name: Artist + arguments: {} + type: Artist + uniqueness_constraints: + Artist_id: + unique_columns: + - _id + - name: Customer + arguments: {} + type: Customer + uniqueness_constraints: + Customer_id: + unique_columns: + - _id + - name: Employee + arguments: {} + type: Employee + uniqueness_constraints: + Employee_id: + unique_columns: + - _id + - name: Genre + arguments: {} + type: Genre + uniqueness_constraints: + Genre_id: + unique_columns: + - _id + - name: Invoice + arguments: {} + type: Invoice + uniqueness_constraints: + Invoice_id: + unique_columns: + - _id + - name: InvoiceLine + arguments: {} + type: InvoiceLine + uniqueness_constraints: + InvoiceLine_id: + unique_columns: + - _id + - name: MediaType + arguments: {} + type: MediaType + uniqueness_constraints: + MediaType_id: + unique_columns: + - _id + - name: Playlist + arguments: {} + type: Playlist + uniqueness_constraints: + Playlist_id: + unique_columns: + - _id + - name: PlaylistTrack + arguments: {} + type: PlaylistTrack + uniqueness_constraints: + PlaylistTrack_id: + unique_columns: + - _id + - name: Track + arguments: {} + type: Track + uniqueness_constraints: + Track_id: + unique_columns: + - _id + - name: artists_with_albums_and_tracks + description: combines artist, albums, and tracks into a single document per artist + arguments: {} + type: ArtistWithAlbumsAndTracks + uniqueness_constraints: + artists_with_albums_and_tracks_id: + unique_columns: + - _id functions: [] procedures: - - name: insertArtist - description: Example of a database update using a native mutation - arguments: - id: - type: - type: named - name: Int - name: - type: - type: named - name: String - result_type: - type: named - name: InsertArtist - - name: updateTrackPrices - description: Update unit price of every track that matches predicate - arguments: - newPrice: - type: - type: 
named - name: Decimal - where: - type: - type: predicate - object_type_name: Track - result_type: - type: named - name: InsertArtist - capabilities: - version: 0.1.6 + - name: insertArtist + description: Example of a database update using a native mutation + arguments: + id: + type: + type: named + name: Int + name: + type: + type: named + name: String + result_type: + type: named + name: InsertArtist + - name: updateTrackPrices + description: Update unit price of every track that matches predicate + arguments: + newPrice: + type: + type: named + name: Decimal + where: + type: + type: predicate + object_type_name: Track + result_type: + type: named + name: InsertArtist capabilities: query: - aggregates: {} - variables: {} - explain: {} - nested_fields: - filter_by: {} - order_by: {} - aggregates: {} - exists: - nested_collections: {} - mutation: {} - relationships: - relation_comparisons: {} + aggregates: + count_scalar_type: Int diff --git a/fixtures/hasura/app/metadata/sample_mflix-types.hml b/fixtures/hasura/app/metadata/sample_mflix-types.hml deleted file mode 100644 index 0675e1a7..00000000 --- a/fixtures/hasura/app/metadata/sample_mflix-types.hml +++ /dev/null @@ -1,601 +0,0 @@ ---- -kind: ScalarType -version: v1 -definition: - name: ObjectId - graphql: - typeName: ObjectId - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: ObjectIdBoolExp - operand: - scalar: - type: ObjectId - comparisonOperators: - - name: _eq - argumentType: ObjectId! - - name: _in - argumentType: "[ObjectId!]!" - - name: _neq - argumentType: ObjectId! - - name: _nin - argumentType: "[ObjectId!]!" - dataConnectorOperatorMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: ObjectId - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: ObjectIdBoolExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: sample_mflix - dataConnectorScalarType: ObjectId - representation: ObjectId - graphql: - comparisonExpressionTypeName: ObjectIdComparisonExp - ---- -kind: ScalarType -version: v1 -definition: - name: Date - graphql: - typeName: Date - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: DateBoolExp - operand: - scalar: - type: Date - comparisonOperators: - - name: _eq - argumentType: Date! - - name: _gt - argumentType: Date! - - name: _gte - argumentType: Date! - - name: _in - argumentType: "[Date!]!" - - name: _lt - argumentType: Date! - - name: _lte - argumentType: Date! - - name: _neq - argumentType: Date! - - name: _nin - argumentType: "[Date!]!" - dataConnectorOperatorMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: Date - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: DateBoolExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: sample_mflix - dataConnectorScalarType: Date - representation: Date - graphql: - comparisonExpressionTypeName: DateComparisonExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: sample_mflix - dataConnectorScalarType: String - representation: String - graphql: - comparisonExpressionTypeName: StringComparisonExp - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: StringBoolExp - operand: - scalar: - type: String - comparisonOperators: - - name: _eq - argumentType: String! - - name: _gt - argumentType: String! - - name: _gte - argumentType: String! 
- - name: _in - argumentType: "[String!]!" - - name: _iregex - argumentType: String! - - name: _lt - argumentType: String! - - name: _lte - argumentType: String! - - name: _neq - argumentType: String! - - name: _nin - argumentType: "[String!]!" - - name: _regex - argumentType: String! - dataConnectorOperatorMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: String - operatorMapping: {} - - dataConnectorName: chinook - dataConnectorScalarType: String - operatorMapping: {} - - dataConnectorName: test_cases - dataConnectorScalarType: String - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: StringBoolExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: sample_mflix - dataConnectorScalarType: Int - representation: Int - graphql: - comparisonExpressionTypeName: IntComparisonExp - ---- -kind: AggregateExpression -version: v1 -definition: - name: ObjectIdAggExp - operand: - scalar: - aggregatedType: ObjectId - aggregationFunctions: - - name: count - returnType: Int! - dataConnectorAggregationFunctionMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: ObjectId - functionMapping: - count: - name: count - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: ObjectIdAggExp - ---- -kind: AggregateExpression -version: v1 -definition: - name: DateAggExp - operand: - scalar: - aggregatedType: Date - aggregationFunctions: - - name: count - returnType: Int! - - name: max - returnType: Date - - name: min - returnType: Date - dataConnectorAggregationFunctionMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: Date - functionMapping: - count: - name: count - max: - name: max - min: - name: min - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: DateAggExp - ---- -kind: AggregateExpression -version: v1 -definition: - name: StringAggExp - operand: - scalar: - aggregatedType: String - aggregationFunctions: - - name: count - returnType: Int! - - name: max - returnType: String - - name: min - returnType: String - dataConnectorAggregationFunctionMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: String - functionMapping: - count: - name: count - max: - name: max - min: - name: min - - dataConnectorName: chinook - dataConnectorScalarType: String - functionMapping: - count: - name: count - max: - name: max - min: - name: min - - dataConnectorName: test_cases - dataConnectorScalarType: String - functionMapping: - count: - name: count - max: - name: max - min: - name: min - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: StringAggExp - ---- -kind: ScalarType -version: v1 -definition: - name: Double - graphql: - typeName: Double - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: DoubleBoolExp - operand: - scalar: - type: Double - comparisonOperators: - - name: _eq - argumentType: Double! - - name: _gt - argumentType: Double! - - name: _gte - argumentType: Double! - - name: _in - argumentType: "[Double!]!" - - name: _lt - argumentType: Double! - - name: _lte - argumentType: Double! - - name: _neq - argumentType: Double! - - name: _nin - argumentType: "[Double!]!" 
- dataConnectorOperatorMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: Double - operatorMapping: {} - - dataConnectorName: chinook - dataConnectorScalarType: Double - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: DoubleBoolExp - ---- -kind: AggregateExpression -version: v1 -definition: - name: DoubleAggExp - operand: - scalar: - aggregatedType: Double - aggregationFunctions: - - name: avg - returnType: Double - - name: count - returnType: Int! - - name: max - returnType: Double - - name: min - returnType: Double - - name: sum - returnType: Double - dataConnectorAggregationFunctionMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: Double - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - - dataConnectorName: chinook - dataConnectorScalarType: Double - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - - dataConnectorName: test_cases - dataConnectorScalarType: Double - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: DoubleAggExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: sample_mflix - dataConnectorScalarType: Double - representation: Double - graphql: - comparisonExpressionTypeName: DoubleComparisonExp - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: IntBoolExp - operand: - scalar: - type: Int - comparisonOperators: - - name: _eq - argumentType: Int! - - name: _gt - argumentType: Int! - - name: _gte - argumentType: Int! - - name: _in - argumentType: "[Int!]!" - - name: _lt - argumentType: Int! - - name: _lte - argumentType: Int! - - name: _neq - argumentType: Int! - - name: _nin - argumentType: "[Int!]!" - dataConnectorOperatorMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: Int - operatorMapping: {} - - dataConnectorName: chinook - dataConnectorScalarType: Int - operatorMapping: {} - - dataConnectorName: test_cases - dataConnectorScalarType: Int - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: IntBoolExp - ---- -kind: AggregateExpression -version: v1 -definition: - name: IntAggExp - operand: - scalar: - aggregatedType: Int - aggregationFunctions: - - name: avg - returnType: Int - - name: count - returnType: Int! 
- - name: max - returnType: Int - - name: min - returnType: Int - - name: sum - returnType: Int - dataConnectorAggregationFunctionMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: Int - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - - dataConnectorName: chinook - dataConnectorScalarType: Int - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - - dataConnectorName: test_cases - dataConnectorScalarType: Int - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: IntAggExp - ---- -kind: ScalarType -version: v1 -definition: - name: ExtendedJson - graphql: - typeName: ExtendedJson - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: ExtendedJsonBoolExp - operand: - scalar: - type: ExtendedJson - comparisonOperators: - - name: _eq - argumentType: ExtendedJson! - - name: _gt - argumentType: ExtendedJson! - - name: _gte - argumentType: ExtendedJson! - - name: _in - argumentType: ExtendedJson! - - name: _iregex - argumentType: String! - - name: _lt - argumentType: ExtendedJson! - - name: _lte - argumentType: ExtendedJson! - - name: _neq - argumentType: ExtendedJson! - - name: _nin - argumentType: ExtendedJson! - - name: _regex - argumentType: String! - dataConnectorOperatorMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: ExtendedJSON - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: ExtendedJsonBoolExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: sample_mflix - dataConnectorScalarType: ExtendedJSON - representation: ExtendedJson - graphql: - comparisonExpressionTypeName: ExtendedJsonComparisonExp - ---- -kind: AggregateExpression -version: v1 -definition: - name: ExtendedJsonAggExp - operand: - scalar: - aggregatedType: ExtendedJson - aggregationFunctions: - - name: avg - returnType: ExtendedJson! - - name: count - returnType: Int! - - name: max - returnType: ExtendedJson! - - name: min - returnType: ExtendedJson! - - name: sum - returnType: ExtendedJson! 
- dataConnectorAggregationFunctionMapping: - - dataConnectorName: sample_mflix - dataConnectorScalarType: ExtendedJSON - functionMapping: - avg: - name: avg - count: - name: count - max: - name: max - min: - name: min - sum: - name: sum - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: ExtendedJsonAggExp - diff --git a/fixtures/hasura/app/metadata/sample_mflix.hml b/fixtures/hasura/app/metadata/sample_mflix.hml index e5cd1f4c..b49a9f0f 100644 --- a/fixtures/hasura/app/metadata/sample_mflix.hml +++ b/fixtures/hasura/app/metadata/sample_mflix.hml @@ -9,12 +9,36 @@ definition: write: valueFromEnv: APP_SAMPLE_MFLIX_WRITE_URL schema: - version: v0.1 + version: v0.2 + capabilities: + version: 0.2.0 + capabilities: + query: + aggregates: {} + variables: {} + explain: {} + nested_fields: + filter_by: + nested_arrays: + contains: {} + is_empty: {} + order_by: {} + aggregates: {} + nested_collections: {} + exists: + unrelated: {} + nested_collections: {} + mutation: {} + relationships: + relation_comparisons: {} schema: scalar_types: BinData: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -40,6 +64,7 @@ definition: type: boolean aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -65,46 +90,27 @@ definition: type: timestamp aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Date + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Date + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Date + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Date + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Date + type: less_than _lte: - type: custom - argument_type: - type: named - name: Date + type: less_than_or_equal _neq: type: custom argument_type: @@ -118,8 +124,11 @@ definition: type: named name: Date DbPointer: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -145,58 +154,33 @@ definition: type: bigdecimal aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: sum + result_type: Double comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Decimal + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Decimal + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Decimal + type: less_than _lte: - type: custom - argument_type: - type: named - name: Decimal + type: less_than_or_equal _neq: type: custom argument_type: @@ -214,58 +198,33 @@ definition: type: float64 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: 
nullable - underlying_type: - type: named - name: Double + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: sum + result_type: Double comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Double + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Double + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Double + type: less_than _lte: - type: custom - argument_type: - type: named - name: Double + type: less_than_or_equal _neq: type: custom argument_type: @@ -283,22 +242,21 @@ definition: type: json aggregate_functions: avg: + type: custom result_type: type: named name: ExtendedJSON count: + type: custom result_type: type: named name: Int max: - result_type: - type: named - name: ExtendedJSON + type: max min: - result_type: - type: named - name: ExtendedJSON + type: min sum: + type: custom result_type: type: named name: ExtendedJSON @@ -306,35 +264,20 @@ definition: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: greater_than _gte: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: greater_than_or_equal _in: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: in _iregex: type: custom argument_type: type: named - name: String + name: Regex _lt: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: less_than _lte: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: less_than_or_equal _neq: type: custom argument_type: @@ -343,70 +286,47 @@ definition: _nin: type: custom argument_type: - type: named - name: ExtendedJSON + type: array + element_type: + type: named + name: ExtendedJSON _regex: type: custom argument_type: type: named - name: String + name: Regex Int: representation: type: int32 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: sum + result_type: Long comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Int + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Int + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Int + type: less_than _lte: - type: custom - argument_type: - type: named - name: Int + type: less_than_or_equal _neq: type: custom argument_type: @@ -420,15 +340,21 @@ definition: type: named name: Int Javascript: + representation: + type: string aggregate_functions: count: + type: custom result_type: type: named name: Int comparison_operators: {} JavascriptWithScope: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -438,58 +364,33 @@ definition: type: int64 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - 
result_type: - type: nullable - underlying_type: - type: named - name: Long + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: sum + result_type: Long comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Long + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Long + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Long + type: less_than _lte: - type: custom - argument_type: - type: named - name: Long + type: less_than_or_equal _neq: type: custom argument_type: @@ -503,8 +404,11 @@ definition: type: named name: Long MaxKey: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -526,8 +430,11 @@ definition: type: named name: MaxKey MinKey: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -548,9 +455,12 @@ definition: element_type: type: named name: MinKey - "Null": + 'Null': + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -563,19 +473,20 @@ definition: type: custom argument_type: type: named - name: "Null" + name: 'Null' _nin: type: custom argument_type: type: array element_type: type: named - name: "Null" + name: 'Null' ObjectId: representation: type: string aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -597,8 +508,11 @@ definition: type: named name: ObjectId Regex: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -608,51 +522,32 @@ definition: type: string aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: String + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: String + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: String + type: greater_than _gte: - type: custom - argument_type: - type: named - name: String + type: greater_than_or_equal _in: type: in _iregex: type: custom argument_type: type: named - name: String + name: Regex _lt: - type: custom - argument_type: - type: named - name: String + type: less_than _lte: - type: custom - argument_type: - type: named - name: String + type: less_than_or_equal _neq: type: custom argument_type: @@ -669,10 +564,13 @@ definition: type: custom argument_type: type: named - name: String + name: Regex Symbol: + representation: + type: string aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -694,48 +592,31 @@ definition: type: named name: Symbol Timestamp: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Timestamp + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Timestamp + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Timestamp + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Timestamp + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Timestamp + type: 
less_than _lte: - type: custom - argument_type: - type: named - name: Timestamp + type: less_than_or_equal _neq: type: custom argument_type: @@ -749,8 +630,11 @@ definition: type: named name: Timestamp Undefined: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -784,12 +668,14 @@ definition: underlying_type: type: named name: ExtendedJSON + foreign_keys: {} Hello: fields: __value: type: type: named name: String + foreign_keys: {} comments: fields: _id: @@ -816,6 +702,7 @@ definition: type: type: named name: String + foreign_keys: {} eq_title_project: fields: _id: @@ -844,12 +731,14 @@ definition: type: type: named name: eq_title_project_what + foreign_keys: {} eq_title_project_bar: fields: foo: type: type: named name: movies_imdb + foreign_keys: {} eq_title_project_foo: fields: bar: @@ -858,18 +747,21 @@ definition: underlying_type: type: named name: movies_tomatoes_critic + foreign_keys: {} eq_title_project_what: fields: the: type: type: named name: eq_title_project_what_the + foreign_keys: {} eq_title_project_what_the: fields: heck: type: type: named name: String + foreign_keys: {} movies: fields: _id: @@ -1000,6 +892,7 @@ definition: type: type: named name: Int + foreign_keys: {} movies_awards: fields: nominations: @@ -1014,6 +907,7 @@ definition: type: type: named name: Int + foreign_keys: {} movies_imdb: fields: id: @@ -1028,6 +922,7 @@ definition: type: type: named name: Int + foreign_keys: {} movies_tomatoes: fields: boxOffice: @@ -1086,6 +981,7 @@ definition: underlying_type: type: named name: String + foreign_keys: {} movies_tomatoes_critic: fields: meter: @@ -1104,6 +1000,7 @@ definition: underlying_type: type: named name: Double + foreign_keys: {} movies_tomatoes_viewer: fields: meter: @@ -1122,6 +1019,7 @@ definition: underlying_type: type: named name: Double + foreign_keys: {} native_query_project: fields: _id: @@ -1150,12 +1048,14 @@ definition: type: type: named name: native_query_project_what + foreign_keys: {} native_query_project_bar: fields: foo: type: type: named name: movies_imdb + foreign_keys: {} native_query_project_foo: fields: bar: @@ -1164,18 +1064,21 @@ definition: underlying_type: type: named name: movies_tomatoes_critic + foreign_keys: {} native_query_project_what: fields: the: type: type: named name: native_query_project_what_the + foreign_keys: {} native_query_project_what_the: fields: heck: type: type: named name: String + foreign_keys: {} sessions: fields: _id: @@ -1190,6 +1093,7 @@ definition: type: type: named name: String + foreign_keys: {} theaters: fields: _id: @@ -1204,6 +1108,7 @@ definition: type: type: named name: Int + foreign_keys: {} theaters_location: fields: address: @@ -1214,6 +1119,7 @@ definition: type: type: named name: theaters_location_geo + foreign_keys: {} theaters_location_address: fields: city: @@ -1238,6 +1144,7 @@ definition: type: type: named name: String + foreign_keys: {} theaters_location_geo: fields: coordinates: @@ -1250,6 +1157,7 @@ definition: type: type: named name: String + foreign_keys: {} title_word_frequency_group: fields: _id: @@ -1260,6 +1168,7 @@ definition: type: type: named name: Int + foreign_keys: {} users: fields: _id: @@ -1284,116 +1193,97 @@ definition: underlying_type: type: named name: users_preferences + foreign_keys: {} users_preferences: fields: {} - collections: - - name: comments - arguments: {} - type: comments - uniqueness_constraints: - comments_id: - unique_columns: - - _id - foreign_keys: {} - - name: eq_title - arguments: - title: - 
type: - type: named - name: String - year: - type: - type: named - name: Int - type: eq_title_project - uniqueness_constraints: - eq_title_id: - unique_columns: - - _id - foreign_keys: {} - - name: extended_json_test_data - description: various values that all have the ExtendedJSON type - arguments: {} - type: DocWithExtendedJsonValue - uniqueness_constraints: {} - foreign_keys: {} - - name: movies - arguments: {} - type: movies - uniqueness_constraints: - movies_id: - unique_columns: - - _id - foreign_keys: {} - - name: native_query - arguments: - title: - type: - type: named - name: String - type: native_query_project - uniqueness_constraints: - native_query_id: - unique_columns: - - _id - foreign_keys: {} - - name: sessions - arguments: {} - type: sessions - uniqueness_constraints: - sessions_id: - unique_columns: - - _id - foreign_keys: {} - - name: theaters - arguments: {} - type: theaters - uniqueness_constraints: - theaters_id: - unique_columns: - - _id - foreign_keys: {} - - name: title_word_frequency - arguments: {} - type: title_word_frequency_group - uniqueness_constraints: - title_word_frequency_id: - unique_columns: - - _id - foreign_keys: {} - - name: users - arguments: {} - type: users - uniqueness_constraints: - users_id: - unique_columns: - - _id foreign_keys: {} + collections: + - name: comments + arguments: {} + type: comments + uniqueness_constraints: + comments_id: + unique_columns: + - _id + - name: eq_title + arguments: + title: + type: + type: named + name: String + year: + type: + type: named + name: Int + type: eq_title_project + uniqueness_constraints: + eq_title_id: + unique_columns: + - _id + - name: extended_json_test_data + description: various values that all have the ExtendedJSON type + arguments: {} + type: DocWithExtendedJsonValue + uniqueness_constraints: {} + - name: movies + arguments: {} + type: movies + uniqueness_constraints: + movies_id: + unique_columns: + - _id + - name: native_query + arguments: + title: + type: + type: named + name: String + type: native_query_project + uniqueness_constraints: + native_query_id: + unique_columns: + - _id + - name: sessions + arguments: {} + type: sessions + uniqueness_constraints: + sessions_id: + unique_columns: + - _id + - name: theaters + arguments: {} + type: theaters + uniqueness_constraints: + theaters_id: + unique_columns: + - _id + - name: title_word_frequency + arguments: {} + type: title_word_frequency_group + uniqueness_constraints: + title_word_frequency_id: + unique_columns: + - _id + - name: users + arguments: {} + type: users + uniqueness_constraints: + users_id: + unique_columns: + - _id functions: - - name: hello - description: Basic test of native queries - arguments: - name: - type: - type: named - name: String - result_type: - type: named - name: String + - name: hello + description: Basic test of native queries + arguments: + name: + type: + type: named + name: String + result_type: + type: named + name: String procedures: [] - capabilities: - version: 0.1.6 capabilities: query: - aggregates: {} - variables: {} - explain: {} - nested_fields: - filter_by: {} - order_by: {} - aggregates: {} - exists: - nested_collections: {} - mutation: {} - relationships: - relation_comparisons: {} + aggregates: + count_scalar_type: Int diff --git a/fixtures/hasura/app/metadata/test_cases-types.hml b/fixtures/hasura/app/metadata/test_cases-types.hml deleted file mode 100644 index 440117db..00000000 --- a/fixtures/hasura/app/metadata/test_cases-types.hml +++ /dev/null @@ -1,99 +0,0 @@ ---- -kind: ScalarType 
-version: v1 -definition: - name: ObjectId_2 - graphql: - typeName: ObjectId2 - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: ObjectIdBoolExp_2 - operand: - scalar: - type: ObjectId_2 - comparisonOperators: - - name: _eq - argumentType: ObjectId_2! - - name: _in - argumentType: "[ObjectId_2!]!" - - name: _neq - argumentType: ObjectId_2! - - name: _nin - argumentType: "[ObjectId_2!]!" - dataConnectorOperatorMapping: - - dataConnectorName: test_cases - dataConnectorScalarType: ObjectId - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true - graphql: - typeName: ObjectIdBoolExp2 - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: test_cases - dataConnectorScalarType: ObjectId - representation: ObjectId_2 - graphql: - comparisonExpressionTypeName: ObjectId2ComparisonExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: test_cases - dataConnectorScalarType: String - representation: String - graphql: - comparisonExpressionTypeName: StringComparisonExp_2 - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: test_cases - dataConnectorScalarType: Int - representation: Int - graphql: - comparisonExpressionTypeName: IntComparisonExp_2 - ---- -kind: AggregateExpression -version: v1 -definition: - name: ObjectIdAggExp_2 - operand: - scalar: - aggregatedType: ObjectId_2 - aggregationFunctions: - - name: count - returnType: Int! - dataConnectorAggregationFunctionMapping: - - dataConnectorName: test_cases - dataConnectorScalarType: ObjectId - functionMapping: - count: - name: count - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: ObjectIdAggExp2 - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: test_cases - dataConnectorScalarType: Double - representation: Double - graphql: - comparisonExpressionTypeName: DoubleComparisonExp diff --git a/fixtures/hasura/app/metadata/test_cases.hml b/fixtures/hasura/app/metadata/test_cases.hml index 8ade514b..eaf77cf0 100644 --- a/fixtures/hasura/app/metadata/test_cases.hml +++ b/fixtures/hasura/app/metadata/test_cases.hml @@ -9,12 +9,36 @@ definition: write: valueFromEnv: APP_TEST_CASES_WRITE_URL schema: - version: v0.1 + version: v0.2 + capabilities: + version: 0.2.0 + capabilities: + query: + aggregates: {} + variables: {} + explain: {} + nested_fields: + filter_by: + nested_arrays: + contains: {} + is_empty: {} + order_by: {} + aggregates: {} + nested_collections: {} + exists: + unrelated: {} + nested_collections: {} + mutation: {} + relationships: + relation_comparisons: {} schema: scalar_types: BinData: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -40,6 +64,7 @@ definition: type: boolean aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -65,46 +90,27 @@ definition: type: timestamp aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Date + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Date + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Date + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Date + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: 
named - name: Date + type: less_than _lte: - type: custom - argument_type: - type: named - name: Date + type: less_than_or_equal _neq: type: custom argument_type: @@ -118,8 +124,11 @@ definition: type: named name: Date DbPointer: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -145,58 +154,33 @@ definition: type: bigdecimal aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Decimal + type: sum + result_type: Double comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Decimal + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Decimal + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Decimal + type: less_than _lte: - type: custom - argument_type: - type: named - name: Decimal + type: less_than_or_equal _neq: type: custom argument_type: @@ -214,58 +198,33 @@ definition: type: float64 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Double + type: sum + result_type: Double comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Double + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Double + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Double + type: less_than _lte: - type: custom - argument_type: - type: named - name: Double + type: less_than_or_equal _neq: type: custom argument_type: @@ -283,22 +242,21 @@ definition: type: json aggregate_functions: avg: + type: custom result_type: type: named name: ExtendedJSON count: + type: custom result_type: type: named name: Int max: - result_type: - type: named - name: ExtendedJSON + type: max min: - result_type: - type: named - name: ExtendedJSON + type: min sum: + type: custom result_type: type: named name: ExtendedJSON @@ -306,35 +264,20 @@ definition: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: greater_than _gte: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: greater_than_or_equal _in: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: in _iregex: type: custom argument_type: type: named - name: String + name: Regex _lt: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: less_than _lte: - type: custom - argument_type: - type: named - name: ExtendedJSON + type: less_than_or_equal _neq: type: custom argument_type: @@ -343,70 +286,47 @@ definition: _nin: type: custom argument_type: - type: named - name: ExtendedJSON + type: array + element_type: + type: named + name: 
ExtendedJSON _regex: type: custom argument_type: type: named - name: String + name: Regex Int: representation: type: int32 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Int + type: sum + result_type: Long comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Int + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Int + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Int + type: less_than _lte: - type: custom - argument_type: - type: named - name: Int + type: less_than_or_equal _neq: type: custom argument_type: @@ -420,15 +340,21 @@ definition: type: named name: Int Javascript: + representation: + type: string aggregate_functions: count: + type: custom result_type: type: named name: Int comparison_operators: {} JavascriptWithScope: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -438,58 +364,33 @@ definition: type: int64 aggregate_functions: avg: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: average + result_type: Double count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: min sum: - result_type: - type: nullable - underlying_type: - type: named - name: Long + type: sum + result_type: Long comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Long + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Long + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Long + type: less_than _lte: - type: custom - argument_type: - type: named - name: Long + type: less_than_or_equal _neq: type: custom argument_type: @@ -503,8 +404,11 @@ definition: type: named name: Long MaxKey: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -526,8 +430,11 @@ definition: type: named name: MaxKey MinKey: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -548,9 +455,12 @@ definition: element_type: type: named name: MinKey - "Null": + 'Null': + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -563,19 +473,20 @@ definition: type: custom argument_type: type: named - name: "Null" + name: 'Null' _nin: type: custom argument_type: type: array element_type: type: named - name: "Null" + name: 'Null' ObjectId: representation: type: string aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -597,8 +508,11 @@ definition: type: named name: ObjectId Regex: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -608,51 +522,32 @@ definition: type: string aggregate_functions: count: + type: custom result_type: type: named name: 
Int max: - result_type: - type: nullable - underlying_type: - type: named - name: String + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: String + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: String + type: greater_than _gte: - type: custom - argument_type: - type: named - name: String + type: greater_than_or_equal _in: type: in _iregex: type: custom argument_type: type: named - name: String + name: Regex _lt: - type: custom - argument_type: - type: named - name: String + type: less_than _lte: - type: custom - argument_type: - type: named - name: String + type: less_than_or_equal _neq: type: custom argument_type: @@ -669,10 +564,13 @@ definition: type: custom argument_type: type: named - name: String + name: Regex Symbol: + representation: + type: string aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -694,48 +592,31 @@ definition: type: named name: Symbol Timestamp: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int max: - result_type: - type: nullable - underlying_type: - type: named - name: Timestamp + type: max min: - result_type: - type: nullable - underlying_type: - type: named - name: Timestamp + type: min comparison_operators: _eq: type: equal _gt: - type: custom - argument_type: - type: named - name: Timestamp + type: greater_than _gte: - type: custom - argument_type: - type: named - name: Timestamp + type: greater_than_or_equal _in: type: in _lt: - type: custom - argument_type: - type: named - name: Timestamp + type: less_than _lte: - type: custom - argument_type: - type: named - name: Timestamp + type: less_than_or_equal _neq: type: custom argument_type: @@ -749,8 +630,11 @@ definition: type: named name: Timestamp Undefined: + representation: + type: json aggregate_functions: count: + type: custom result_type: type: named name: Int @@ -772,6 +656,49 @@ definition: type: named name: Undefined object_types: + departments: + fields: + _id: + type: + type: named + name: ObjectId + description: + type: + type: named + name: String + foreign_keys: {} + schools: + fields: + _id: + type: + type: named + name: ObjectId + departments: + type: + type: named + name: schools_departments + name: + type: + type: named + name: String + foreign_keys: {} + schools_departments: + fields: + english_department_id: + type: + type: named + name: ObjectId + math_department_id: + type: + type: named + name: ObjectId + description: + type: + type: nullable + underlying_type: + type: named + name: String + foreign_keys: {} nested_collection: fields: _id: @@ -788,12 +715,14 @@ definition: element_type: type: named name: nested_collection_staff + foreign_keys: {} nested_collection_staff: fields: name: type: type: named name: String + foreign_keys: {} nested_field_with_dollar: fields: _id: @@ -804,6 +733,7 @@ definition: type: type: named name: nested_field_with_dollar_configuration + foreign_keys: {} nested_field_with_dollar_configuration: fields: $schema: @@ -812,6 +742,7 @@ definition: underlying_type: type: named name: String + foreign_keys: {} weird_field_names: fields: $invalid.array: @@ -836,64 +767,67 @@ definition: type: type: named name: weird_field_names_valid_object_name + foreign_keys: {} weird_field_names_$invalid.array: fields: $invalid.element: type: type: named name: Int + foreign_keys: {} weird_field_names_$invalid.object.name: fields: valid_name: type: type: named name: Int + foreign_keys: {} 
weird_field_names_valid_object_name: fields: $invalid.nested.name: type: type: named name: Int - collections: - - name: nested_collection - arguments: {} - type: nested_collection - uniqueness_constraints: - nested_collection_id: - unique_columns: - - _id - foreign_keys: {} - - name: nested_field_with_dollar - arguments: {} - type: nested_field_with_dollar - uniqueness_constraints: - nested_field_with_dollar_id: - unique_columns: - - _id - foreign_keys: {} - - name: weird_field_names - arguments: {} - type: weird_field_names - uniqueness_constraints: - weird_field_names_id: - unique_columns: - - _id foreign_keys: {} + collections: + - name: departments + arguments: {} + type: departments + uniqueness_constraints: + nested_field_with_dollar_id: + unique_columns: + - _id + - name: schools + arguments: {} + type: schools + uniqueness_constraints: + nested_field_with_dollar_id: + unique_columns: + - _id + - name: nested_collection + arguments: {} + type: nested_collection + uniqueness_constraints: + nested_collection_id: + unique_columns: + - _id + - name: nested_field_with_dollar + arguments: {} + type: nested_field_with_dollar + uniqueness_constraints: + nested_field_with_dollar_id: + unique_columns: + - _id + - name: weird_field_names + arguments: {} + type: weird_field_names + uniqueness_constraints: + weird_field_names_id: + unique_columns: + - _id functions: [] procedures: [] - capabilities: - version: 0.1.6 capabilities: query: - aggregates: {} - variables: {} - explain: {} - nested_fields: - filter_by: {} - order_by: {} - aggregates: {} - exists: - nested_collections: {} - mutation: {} - relationships: - relation_comparisons: {} + aggregates: + count_scalar_type: Int diff --git a/fixtures/hasura/app/metadata/types/date.hml b/fixtures/hasura/app/metadata/types/date.hml new file mode 100644 index 00000000..fc3cdceb --- /dev/null +++ b/fixtures/hasura/app/metadata/types/date.hml @@ -0,0 +1,85 @@ +--- +kind: ScalarType +version: v1 +definition: + name: Date + graphql: + typeName: Date + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: DateBoolExp + operand: + scalar: + type: Date + comparisonOperators: + - name: _eq + argumentType: Date! + - name: _gt + argumentType: Date! + - name: _gte + argumentType: Date! + - name: _in + argumentType: "[Date!]!" + - name: _lt + argumentType: Date! + - name: _lte + argumentType: Date! + - name: _neq + argumentType: Date! + - name: _nin + argumentType: "[Date!]!" + dataConnectorOperatorMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Date + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: DateBoolExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: Date + representation: Date + graphql: + comparisonExpressionTypeName: DateComparisonExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: DateAggExp + operand: + scalar: + aggregatedType: Date + aggregationFunctions: + - name: count + returnType: Int! 
+ - name: max + returnType: Date + - name: min + returnType: Date + dataConnectorAggregationFunctionMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Date + functionMapping: + count: + name: count + max: + name: max + min: + name: min + count: + enable: true + countDistinct: + enable: true + graphql: + selectTypeName: DateAggExp diff --git a/fixtures/hasura/app/metadata/chinook-types.hml b/fixtures/hasura/app/metadata/types/decimal.hml similarity index 52% rename from fixtures/hasura/app/metadata/chinook-types.hml rename to fixtures/hasura/app/metadata/types/decimal.hml index ef109d7b..4a30e020 100644 --- a/fixtures/hasura/app/metadata/chinook-types.hml +++ b/fixtures/hasura/app/metadata/types/decimal.hml @@ -2,99 +2,39 @@ kind: ScalarType version: v1 definition: - name: ObjectId_1 - graphql: - typeName: ObjectId1 - ---- -kind: BooleanExpressionType -version: v1 -definition: - name: ObjectIdBoolExp_1 - operand: - scalar: - type: ObjectId_1 - comparisonOperators: - - name: _eq - argumentType: ObjectId_1! - - name: _in - argumentType: "[ObjectId_1!]!" - - name: _neq - argumentType: ObjectId_1! - - name: _nin - argumentType: "[ObjectId_1!]!" - dataConnectorOperatorMapping: - - dataConnectorName: chinook - dataConnectorScalarType: ObjectId - operatorMapping: {} - logicalOperators: - enable: true - isNull: - enable: true + name: Decimal graphql: - typeName: ObjectIdBoolExp1 + typeName: Decimal --- kind: DataConnectorScalarRepresentation version: v1 definition: dataConnectorName: chinook - dataConnectorScalarType: ObjectId - representation: ObjectId_1 + dataConnectorScalarType: Decimal + representation: Decimal graphql: - comparisonExpressionTypeName: ObjectId1ComparisonExp + comparisonExpressionTypeName: DecimalComparisonExp --- kind: DataConnectorScalarRepresentation version: v1 definition: - dataConnectorName: chinook - dataConnectorScalarType: Int - representation: Int + dataConnectorName: sample_mflix + dataConnectorScalarType: Decimal + representation: Decimal graphql: - comparisonExpressionTypeName: IntComparisonExp_1 + comparisonExpressionTypeName: DecimalComparisonExp --- kind: DataConnectorScalarRepresentation version: v1 definition: - dataConnectorName: chinook - dataConnectorScalarType: String - representation: String - graphql: - comparisonExpressionTypeName: StringComparisonExp_1 - ---- -kind: AggregateExpression -version: v1 -definition: - name: ObjectIdAggExp_1 - operand: - scalar: - aggregatedType: ObjectId_1 - aggregationFunctions: - - name: count - returnType: Int! 
- dataConnectorAggregationFunctionMapping: - - dataConnectorName: chinook - dataConnectorScalarType: ObjectId - functionMapping: - count: - name: count - count: - enable: true - countDistinct: - enable: true - graphql: - selectTypeName: ObjectIdAggExp1 - ---- -kind: ScalarType -version: v1 -definition: - name: Decimal + dataConnectorName: test_cases + dataConnectorScalarType: Decimal + representation: Decimal graphql: - typeName: Decimal + comparisonExpressionTypeName: DecimalComparisonExp --- kind: BooleanExpressionType @@ -132,16 +72,6 @@ definition: graphql: typeName: DecimalBoolExp ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: chinook - dataConnectorScalarType: Decimal - representation: Decimal - graphql: - comparisonExpressionTypeName: DecimalComparisonExp - --- kind: AggregateExpression version: v1 @@ -152,7 +82,7 @@ definition: aggregatedType: Decimal aggregationFunctions: - name: avg - returnType: Decimal + returnType: Double - name: count returnType: Int! - name: max @@ -160,7 +90,7 @@ definition: - name: min returnType: Decimal - name: sum - returnType: Decimal + returnType: Double dataConnectorAggregationFunctionMapping: - dataConnectorName: chinook dataConnectorScalarType: Decimal @@ -175,20 +105,35 @@ definition: name: min sum: name: sum + - dataConnectorName: sample_mflix + dataConnectorScalarType: Decimal + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: test_cases + dataConnectorScalarType: Decimal + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum count: enable: true countDistinct: enable: true graphql: selectTypeName: DecimalAggExp - ---- -kind: DataConnectorScalarRepresentation -version: v1 -definition: - dataConnectorName: chinook - dataConnectorScalarType: Double - representation: Double - graphql: - comparisonExpressionTypeName: DoubleComparisonExp - diff --git a/fixtures/hasura/app/metadata/types/double.hml b/fixtures/hasura/app/metadata/types/double.hml new file mode 100644 index 00000000..8d9ca0bc --- /dev/null +++ b/fixtures/hasura/app/metadata/types/double.hml @@ -0,0 +1,142 @@ +--- +kind: ScalarType +version: v1 +definition: + name: Double + graphql: + typeName: Double + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: chinook + dataConnectorScalarType: Double + representation: Double + graphql: + comparisonExpressionTypeName: DoubleComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: Double + representation: Double + graphql: + comparisonExpressionTypeName: DoubleComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: test_cases + dataConnectorScalarType: Double + representation: Double + graphql: + comparisonExpressionTypeName: DoubleComparisonExp + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: DoubleBoolExp + operand: + scalar: + type: Double + comparisonOperators: + - name: _eq + argumentType: Double! + - name: _gt + argumentType: Double! + - name: _gte + argumentType: Double! + - name: _in + argumentType: "[Double!]!" + - name: _lt + argumentType: Double! + - name: _lte + argumentType: Double! + - name: _neq + argumentType: Double! + - name: _nin + argumentType: "[Double!]!" 
+ dataConnectorOperatorMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Double + operatorMapping: {} + - dataConnectorName: chinook + dataConnectorScalarType: Double + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: DoubleBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: DoubleAggExp + operand: + scalar: + aggregatedType: Double + aggregationFunctions: + - name: avg + returnType: Double + - name: count + returnType: Int! + - name: max + returnType: Double + - name: min + returnType: Double + - name: sum + returnType: Double + dataConnectorAggregationFunctionMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Double + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: chinook + dataConnectorScalarType: Double + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: test_cases + dataConnectorScalarType: Double + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + count: + enable: true + countDistinct: + enable: true + graphql: + selectTypeName: DoubleAggExp diff --git a/fixtures/hasura/app/metadata/types/extendedJSON.hml b/fixtures/hasura/app/metadata/types/extendedJSON.hml new file mode 100644 index 00000000..fad40c22 --- /dev/null +++ b/fixtures/hasura/app/metadata/types/extendedJSON.hml @@ -0,0 +1,97 @@ +--- +kind: ScalarType +version: v1 +definition: + name: ExtendedJson + graphql: + typeName: ExtendedJson + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: ExtendedJsonBoolExp + operand: + scalar: + type: ExtendedJson + comparisonOperators: + - name: _eq + argumentType: ExtendedJson! + - name: _gt + argumentType: ExtendedJson! + - name: _gte + argumentType: ExtendedJson! + - name: _in + argumentType: ExtendedJson! + - name: _iregex + argumentType: String! + - name: _lt + argumentType: ExtendedJson! + - name: _lte + argumentType: ExtendedJson! + - name: _neq + argumentType: ExtendedJson! + - name: _nin + argumentType: ExtendedJson! + - name: _regex + argumentType: String! + dataConnectorOperatorMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: ExtendedJSON + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: ExtendedJsonBoolExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: ExtendedJSON + representation: ExtendedJson + graphql: + comparisonExpressionTypeName: ExtendedJsonComparisonExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: ExtendedJsonAggExp + operand: + scalar: + aggregatedType: ExtendedJson + aggregationFunctions: + - name: avg + returnType: ExtendedJson! + - name: count + returnType: Int! + - name: max + returnType: ExtendedJson! + - name: min + returnType: ExtendedJson! + - name: sum + returnType: ExtendedJson! 
+ dataConnectorAggregationFunctionMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: ExtendedJSON + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + count: + enable: true + countDistinct: + enable: true + graphql: + selectTypeName: ExtendedJsonAggExp diff --git a/fixtures/hasura/app/metadata/types/int.hml b/fixtures/hasura/app/metadata/types/int.hml new file mode 100644 index 00000000..88d6333b --- /dev/null +++ b/fixtures/hasura/app/metadata/types/int.hml @@ -0,0 +1,137 @@ +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: chinook + dataConnectorScalarType: Int + representation: Int + graphql: + comparisonExpressionTypeName: IntComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: Int + representation: Int + graphql: + comparisonExpressionTypeName: IntComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: test_cases + dataConnectorScalarType: Int + representation: Int + graphql: + comparisonExpressionTypeName: IntComparisonExp + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: IntBoolExp + operand: + scalar: + type: Int + comparisonOperators: + - name: _eq + argumentType: Int! + - name: _gt + argumentType: Int! + - name: _gte + argumentType: Int! + - name: _in + argumentType: "[Int!]!" + - name: _lt + argumentType: Int! + - name: _lte + argumentType: Int! + - name: _neq + argumentType: Int! + - name: _nin + argumentType: "[Int!]!" + dataConnectorOperatorMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Int + operatorMapping: {} + - dataConnectorName: chinook + dataConnectorScalarType: Int + operatorMapping: {} + - dataConnectorName: test_cases + dataConnectorScalarType: Int + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: IntBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: IntAggExp + operand: + scalar: + aggregatedType: Int + aggregationFunctions: + - name: avg + returnType: Double + - name: count + returnType: Int! 
+ - name: max + returnType: Int + - name: min + returnType: Int + - name: sum + returnType: Long + dataConnectorAggregationFunctionMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Int + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: chinook + dataConnectorScalarType: Int + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: test_cases + dataConnectorScalarType: Int + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + count: + enable: true + countDistinct: + enable: true + graphql: + selectTypeName: IntAggExp diff --git a/fixtures/hasura/app/metadata/types/long.hml b/fixtures/hasura/app/metadata/types/long.hml new file mode 100644 index 00000000..68f08e76 --- /dev/null +++ b/fixtures/hasura/app/metadata/types/long.hml @@ -0,0 +1,145 @@ +--- +kind: ScalarType +version: v1 +definition: + name: Long + graphql: + typeName: Long + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: chinook + dataConnectorScalarType: Long + representation: Long + graphql: + comparisonExpressionTypeName: LongComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: Long + representation: Long + graphql: + comparisonExpressionTypeName: LongComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: test_cases + dataConnectorScalarType: Long + representation: Long + graphql: + comparisonExpressionTypeName: LongComparisonExp + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: LongBoolExp + operand: + scalar: + type: Long + comparisonOperators: + - name: _eq + argumentType: Long! + - name: _gt + argumentType: Long! + - name: _gte + argumentType: Long! + - name: _in + argumentType: "[Long!]!" + - name: _lt + argumentType: Long! + - name: _lte + argumentType: Long! + - name: _neq + argumentType: Long! + - name: _nin + argumentType: "[Long!]!" + dataConnectorOperatorMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Long + operatorMapping: {} + - dataConnectorName: chinook + dataConnectorScalarType: Long + operatorMapping: {} + - dataConnectorName: test_cases + dataConnectorScalarType: Long + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: LongBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: LongAggExp + operand: + scalar: + aggregatedType: Long + aggregationFunctions: + - name: avg + returnType: Double + - name: count + returnType: Int! 
+ - name: max + returnType: Long + - name: min + returnType: Long + - name: sum + returnType: Long + dataConnectorAggregationFunctionMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: Long + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: chinook + dataConnectorScalarType: Long + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + - dataConnectorName: test_cases + dataConnectorScalarType: Long + functionMapping: + avg: + name: avg + count: + name: count + max: + name: max + min: + name: min + sum: + name: sum + count: + enable: true + countDistinct: + enable: true + graphql: + selectTypeName: LongAggExp diff --git a/fixtures/hasura/app/metadata/types/objectId.hml b/fixtures/hasura/app/metadata/types/objectId.hml new file mode 100644 index 00000000..80647c95 --- /dev/null +++ b/fixtures/hasura/app/metadata/types/objectId.hml @@ -0,0 +1,104 @@ +--- +kind: ScalarType +version: v1 +definition: + name: ObjectId + graphql: + typeName: ObjectId + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: chinook + dataConnectorScalarType: ObjectId + representation: ObjectId + graphql: + comparisonExpressionTypeName: ObjectIdComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: ObjectId + representation: ObjectId + graphql: + comparisonExpressionTypeName: ObjectIdComparisonExp + +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: test_cases + dataConnectorScalarType: ObjectId + representation: ObjectId + graphql: + comparisonExpressionTypeName: ObjectIdComparisonExp + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: ObjectIdBoolExp + operand: + scalar: + type: ObjectId + comparisonOperators: + - name: _eq + argumentType: ObjectId! + - name: _in + argumentType: "[ObjectId!]!" + - name: _neq + argumentType: ObjectId! + - name: _nin + argumentType: "[ObjectId!]!" + dataConnectorOperatorMapping: + - dataConnectorName: chinook + dataConnectorScalarType: ObjectId + operatorMapping: {} + - dataConnectorName: sample_mflix + dataConnectorScalarType: ObjectId + operatorMapping: {} + - dataConnectorName: test_cases + dataConnectorScalarType: ObjectId + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: ObjectIdBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: ObjectIdAggExp + operand: + scalar: + aggregatedType: ObjectId + aggregationFunctions: + - name: count + returnType: Int! 
+ dataConnectorAggregationFunctionMapping: + - dataConnectorName: chinook + dataConnectorScalarType: ObjectId + functionMapping: + count: + name: count + - dataConnectorName: sample_mflix + dataConnectorScalarType: ObjectId + functionMapping: + count: + name: count + - dataConnectorName: test_cases + dataConnectorScalarType: ObjectId + functionMapping: + count: + name: count + count: + enable: true + countDistinct: + enable: true + graphql: + selectTypeName: ObjectIdAggExp diff --git a/fixtures/hasura/app/metadata/types/string.hml b/fixtures/hasura/app/metadata/types/string.hml new file mode 100644 index 00000000..54d1047e --- /dev/null +++ b/fixtures/hasura/app/metadata/types/string.hml @@ -0,0 +1,125 @@ +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: chinook + dataConnectorScalarType: String + representation: String + graphql: + comparisonExpressionTypeName: StringComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: sample_mflix + dataConnectorScalarType: String + representation: String + graphql: + comparisonExpressionTypeName: StringComparisonExp + +--- +kind: DataConnectorScalarRepresentation +version: v1 +definition: + dataConnectorName: test_cases + dataConnectorScalarType: String + representation: String + graphql: + comparisonExpressionTypeName: StringComparisonExp + +--- +kind: BooleanExpressionType +version: v1 +definition: + name: StringBoolExp + operand: + scalar: + type: String + comparisonOperators: + - name: _eq + argumentType: String! + - name: _gt + argumentType: String! + - name: _gte + argumentType: String! + - name: _in + argumentType: "[String!]!" + - name: _iregex + argumentType: String! + - name: _lt + argumentType: String! + - name: _lte + argumentType: String! + - name: _neq + argumentType: String! + - name: _nin + argumentType: "[String!]!" + - name: _regex + argumentType: String! + dataConnectorOperatorMapping: + - dataConnectorName: sample_mflix + dataConnectorScalarType: String + operatorMapping: {} + - dataConnectorName: chinook + dataConnectorScalarType: String + operatorMapping: {} + - dataConnectorName: test_cases + dataConnectorScalarType: String + operatorMapping: {} + logicalOperators: + enable: true + isNull: + enable: true + graphql: + typeName: StringBoolExp + +--- +kind: AggregateExpression +version: v1 +definition: + name: StringAggExp + operand: + scalar: + aggregatedType: String + aggregationFunctions: + - name: count + returnType: Int! 
+        - name: max
+          returnType: String
+        - name: min
+          returnType: String
+      dataConnectorAggregationFunctionMapping:
+        - dataConnectorName: sample_mflix
+          dataConnectorScalarType: String
+          functionMapping:
+            count:
+              name: count
+            max:
+              name: max
+            min:
+              name: min
+        - dataConnectorName: chinook
+          dataConnectorScalarType: String
+          functionMapping:
+            count:
+              name: count
+            max:
+              name: max
+            min:
+              name: min
+        - dataConnectorName: test_cases
+          dataConnectorScalarType: String
+          functionMapping:
+            count:
+              name: count
+            max:
+              name: max
+            min:
+              name: min
+  count:
+    enable: true
+  countDistinct:
+    enable: true
+  graphql:
+    selectTypeName: StringAggExp
diff --git a/fixtures/mongodb/sample_mflix/movies.json b/fixtures/mongodb/sample_mflix/movies.json
index c957d784..3cf5fd14 100644
--- a/fixtures/mongodb/sample_mflix/movies.json
+++ b/fixtures/mongodb/sample_mflix/movies.json
@@ -1,7 +1,7 @@
 {"_id":{"$oid":"573a1390f29313caabcd4135"},"plot":"Three men hammer on an anvil and pass a bottle of beer around.","genres":["Short"],"runtime":{"$numberInt":"1"},"cast":["Charles Kayser","John Ott"],"num_mflix_comments":{"$numberInt":"1"},"title":"Blacksmith Scene","fullplot":"A stationary camera looks at a large anvil with a blacksmith behind it and one on either side. The smith in the middle draws a heated metal rod from the fire, places it on the anvil, and all three begin a rhythmic hammering. After several blows, the metal goes back in the fire. One smith pulls out a bottle of beer, and they each take a swig. Then, out comes the glowing metal and the hammering resumes.","countries":["USA"],"released":{"$date":{"$numberLong":"-2418768000000"}},"directors":["William K.L. Dickson"],"rated":"UNRATED","awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-26 00:03:50.133000000","year":{"$numberInt":"1893"},"imdb":{"rating":{"$numberDouble":"6.2"},"votes":{"$numberInt":"1189"},"id":{"$numberInt":"5"}},"type":"movie","tomatoes":{"viewer":{"rating":{"$numberInt":"3"},"numReviews":{"$numberInt":"184"},"meter":{"$numberInt":"32"}},"lastUpdated":{"$date":{"$numberLong":"1435516449000"}}}}
 {"_id":{"$oid":"573a1390f29313caabcd42e8"},"plot":"A group of bandits stage a brazen train hold-up, only to find a determined posse hot on their heels.","genres":["Short","Western"],"runtime":{"$numberInt":"11"},"cast":["A.C. Abadie","Gilbert M. 'Broncho Billy' Anderson","George Barnes","Justus D. Barnes"],"poster":"https://m.media-amazon.com/images/M/MV5BMTU3NjE5NzYtYTYyNS00MDVmLWIwYjgtMmYwYWIxZDYyNzU2XkEyXkFqcGdeQXVyNzQzNzQxNzI@._V1_SY1000_SX677_AL_.jpg","title":"The Great Train Robbery","fullplot":"Among the earliest existing films in American cinema - notable as the first film that presented a narrative story to tell - it depicts a group of cowboy outlaws who hold up a train and rob the passengers. They are then pursued by a Sheriff's posse. Several scenes have color included - all hand tinted.","languages":["English"],"released":{"$date":{"$numberLong":"-2085523200000"}},"directors":["Edwin S. Porter"],"rated":"TV-G","awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-13 00:27:59.177000000","year":{"$numberInt":"1903"},"imdb":{"rating":{"$numberDouble":"7.4"},"votes":{"$numberInt":"9847"},"id":{"$numberInt":"439"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberDouble":"3.7"},"numReviews":{"$numberInt":"2559"},"meter":{"$numberInt":"75"}},"fresh":{"$numberInt":"6"},"critic":{"rating":{"$numberDouble":"7.6"},"numReviews":{"$numberInt":"6"},"meter":{"$numberInt":"100"}},"rotten":{"$numberInt":"0"},"lastUpdated":{"$date":{"$numberLong":"1439061370000"}}}}
 {"_id":{"$oid":"573a1390f29313caabcd4323"},"plot":"A young boy, opressed by his mother, goes on an outing in the country with a social welfare group where he dares to dream of a land where the cares of his ordinary life fade.","genres":["Short","Drama","Fantasy"],"runtime":{"$numberInt":"14"},"rated":"UNRATED","cast":["Martin Fuller","Mrs. William Bechtel","Walter Edwin","Ethel Jewett"],"num_mflix_comments":{"$numberInt":"2"},"poster":"https://m.media-amazon.com/images/M/MV5BMTMzMDcxMjgyNl5BMl5BanBnXkFtZTcwOTgxNjg4Mg@@._V1_SY1000_SX677_AL_.jpg","title":"The Land Beyond the Sunset","fullplot":"Thanks to the Fresh Air Fund, a slum child escapes his drunken mother for a day's outing in the country. Upon arriving, he and the other children are told a story about a mythical land of no pain. Rather then return to the slum at day's end, the lad seeks to journey to that beautiful land beyond the sunset.","languages":["English"],"released":{"$date":{"$numberLong":"-1804377600000"}},"directors":["Harold M. Shaw"],"writers":["Dorothy G. Shore"],"awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-29 00:27:45.437000000","year":{"$numberInt":"1912"},"imdb":{"rating":{"$numberDouble":"7.1"},"votes":{"$numberInt":"448"},"id":{"$numberInt":"488"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberDouble":"3.7"},"numReviews":{"$numberInt":"53"},"meter":{"$numberInt":"67"}},"lastUpdated":{"$date":{"$numberLong":"1430161595000"}}}}
-{"_id":{"$oid":"573a1390f29313caabcd446f"},"plot":"A greedy tycoon decides, on a whim, to corner the world market in wheat. This doubles the price of bread, forcing the grain's producers into charity lines and further into poverty. The film...","genres":["Short","Drama"],"runtime":{"$numberInt":"14"},"cast":["Frank Powell","Grace Henderson","James Kirkwood","Linda Arvidson"],"num_mflix_comments":{"$numberInt":"1"},"title":"A Corner in Wheat","fullplot":"A greedy tycoon decides, on a whim, to corner the world market in wheat. This doubles the price of bread, forcing the grain's producers into charity lines and further into poverty. The film continues to contrast the ironic differences between the lives of those who work to grow the wheat and the life of the man who dabbles in its sale for profit.","languages":["English"],"released":{"$date":{"$numberLong":"-1895097600000"}},"directors":["D.W. Griffith"],"rated":"G","awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-13 00:46:30.660000000","year":{"$numberInt":"1909"},"imdb":{"rating":{"$numberDouble":"6.6"},"votes":{"$numberInt":"1375"},"id":{"$numberInt":"832"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberDouble":"3.6"},"numReviews":{"$numberInt":"109"},"meter":{"$numberInt":"73"}},"lastUpdated":{"$date":{"$numberLong":"1431369413000"}}}}
+{"_id":{"$oid":"573a1390f29313caabcd446f"},"plot":"A greedy tycoon decides, on a whim, to corner the world market in wheat. This doubles the price of bread, forcing the grain's producers into charity lines and further into poverty. The film...","genres":["Short","Drama"],"runtime":{"$numberInt":"14"},"cast":["Frank Powell","Grace Henderson","James Kirkwood","Linda Arvidson"],"num_mflix_comments":{"$numberInt":"1"},"title":"A Corner in Wheat","fullplot":"A greedy tycoon decides, on a whim, to corner the world market in wheat. This doubles the price of bread, forcing the grain's producers into charity lines and further into poverty. The film continues to contrast the ironic differences between the lives of those who work to grow the wheat and the life of the man who dabbles in its sale for profit.","languages":["English"],"released":{"$date":{"$numberLong":"-1895097600000"}},"directors":["D.W. Griffith"],"writers":[],"rated":"G","awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-13 00:46:30.660000000","year":{"$numberInt":"1909"},"imdb":{"rating":{"$numberDouble":"6.6"},"votes":{"$numberInt":"1375"},"id":{"$numberInt":"832"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberDouble":"3.6"},"numReviews":{"$numberInt":"109"},"meter":{"$numberInt":"73"}},"lastUpdated":{"$date":{"$numberLong":"1431369413000"}}}}
 {"_id":{"$oid":"573a1390f29313caabcd4803"},"plot":"Cartoon figures announce, via comic strip balloons, that they will move - and move they do, in a wildly exaggerated style.","genres":["Animation","Short","Comedy"],"runtime":{"$numberInt":"7"},"cast":["Winsor McCay"],"num_mflix_comments":{"$numberInt":"1"},"poster":"https://m.media-amazon.com/images/M/MV5BYzg2NjNhNTctMjUxMi00ZWU4LWI3ZjYtNTI0NTQxNThjZTk2XkEyXkFqcGdeQXVyNzg5OTk2OA@@._V1_SY1000_SX677_AL_.jpg","title":"Winsor McCay, the Famous Cartoonist of the N.Y. Herald and His Moving Comics","fullplot":"Cartoonist Winsor McCay agrees to create a large set of drawings that will be photographed and made into a motion picture. The job requires plenty of drawing supplies, and the cartoonist must also overcome some mishaps caused by an assistant. Finally, the work is done, and everyone can see the resulting animated picture.","languages":["English"],"released":{"$date":{"$numberLong":"-1853539200000"}},"directors":["Winsor McCay","J. Stuart Blackton"],"writers":["Winsor McCay (comic strip \"Little Nemo in Slumberland\")","Winsor McCay (screenplay)"],"awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-29 01:09:03.030000000","year":{"$numberInt":"1911"},"imdb":{"rating":{"$numberDouble":"7.3"},"votes":{"$numberInt":"1034"},"id":{"$numberInt":"1737"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberDouble":"3.4"},"numReviews":{"$numberInt":"89"},"meter":{"$numberInt":"47"}},"lastUpdated":{"$date":{"$numberLong":"1440096684000"}}}}
 {"_id":{"$oid":"573a1390f29313caabcd4eaf"},"plot":"A woman, with the aid of her police officer sweetheart, endeavors to uncover the prostitution ring that has kidnapped her sister, and the philanthropist who secretly runs it.","genres":["Crime","Drama"],"runtime":{"$numberInt":"88"},"cast":["Jane Gail","Ethel Grandin","William H. Turner","Matt Moore"],"num_mflix_comments":{"$numberInt":"2"},"poster":"https://m.media-amazon.com/images/M/MV5BYzk0YWQzMGYtYTM5MC00NjM2LWE5YzYtMjgyNDVhZDg1N2YzXkEyXkFqcGdeQXVyMzE0MjY5ODA@._V1_SY1000_SX677_AL_.jpg","title":"Traffic in Souls","lastupdated":"2015-09-15 02:07:14.247000000","languages":["English"],"released":{"$date":{"$numberLong":"-1770508800000"}},"directors":["George Loane Tucker"],"rated":"TV-PG","awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"year":{"$numberInt":"1913"},"imdb":{"rating":{"$numberInt":"6"},"votes":{"$numberInt":"371"},"id":{"$numberInt":"3471"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberInt":"3"},"numReviews":{"$numberInt":"85"},"meter":{"$numberInt":"57"}},"dvd":{"$date":{"$numberLong":"1219708800000"}},"lastUpdated":{"$date":{"$numberLong":"1439231635000"}}}}
 {"_id":{"$oid":"573a1390f29313caabcd50e5"},"plot":"The cartoonist, Winsor McCay, brings the Dinosaurus back to life in the figure of his latest creation, Gertie the Dinosaur.","genres":["Animation","Short","Comedy"],"runtime":{"$numberInt":"12"},"cast":["Winsor McCay","George McManus","Roy L. McCardell"],"num_mflix_comments":{"$numberInt":"1"},"poster":"https://m.media-amazon.com/images/M/MV5BMTQxNzI4ODQ3NF5BMl5BanBnXkFtZTgwNzY5NzMwMjE@._V1_SY1000_SX677_AL_.jpg","title":"Gertie the Dinosaur","fullplot":"Winsor Z. McCay bets another cartoonist that he can animate a dinosaur. So he draws a big friendly herbivore called Gertie. Then he get into his own picture. Gertie walks through the picture, eats a tree, meets her creator, and takes him carefully on her back for a ride.","languages":["English"],"released":{"$date":{"$numberLong":"-1745020800000"}},"directors":["Winsor McCay"],"writers":["Winsor McCay"],"awards":{"wins":{"$numberInt":"1"},"nominations":{"$numberInt":"0"},"text":"1 win."},"lastupdated":"2015-08-18 01:03:15.313000000","year":{"$numberInt":"1914"},"imdb":{"rating":{"$numberDouble":"7.3"},"votes":{"$numberInt":"1837"},"id":{"$numberInt":"4008"}},"countries":["USA"],"type":"movie","tomatoes":{"viewer":{"rating":{"$numberDouble":"3.7"},"numReviews":{"$numberInt":"29"}},"lastUpdated":{"$date":{"$numberLong":"1439234403000"}}}}
diff --git a/fixtures/mongodb/test_cases/departments.json b/fixtures/mongodb/test_cases/departments.json
new file mode 100644
index 00000000..557e4621
--- /dev/null
+++ b/fixtures/mongodb/test_cases/departments.json
@@ -0,0 +1,2 @@
+{ "_id": { "$oid": "67857bc2f317ca21359981d5" }, "description": "West Valley English" }
+{ "_id": { "$oid": "67857be3f317ca21359981d6" }, "description": "West Valley Math" }
diff --git a/fixtures/mongodb/test_cases/import.sh b/fixtures/mongodb/test_cases/import.sh
index 6f647970..3c7f671f 100755
--- a/fixtures/mongodb/test_cases/import.sh
+++ b/fixtures/mongodb/test_cases/import.sh
@@ -11,8 +11,9 @@ set -euo pipefail
 FIXTURES=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 
 echo "📡 Importing test case data..."
-mongoimport --db test_cases --collection weird_field_names --file "$FIXTURES"/weird_field_names.json
-mongoimport --db test_cases --collection nested_collection --file "$FIXTURES"/nested_collection.json
-mongoimport --db test_cases --collection nested_field_with_dollar --file "$FIXTURES"/nested_field_with_dollar.json
+for fixture in "$FIXTURES"/*.json; do
+  collection=$(basename "$fixture" .json)
+  mongoimport --db test_cases --collection "$collection" --file "$fixture"
+done
 
 echo "✅ test case data imported..."
diff --git a/fixtures/mongodb/test_cases/schools.json b/fixtures/mongodb/test_cases/schools.json
new file mode 100644
index 00000000..c2cc732a
--- /dev/null
+++ b/fixtures/mongodb/test_cases/schools.json
@@ -0,0 +1 @@
+{ "_id": { "$oid": "67857b7ef317ca21359981d4" }, "name": "West Valley", "departments": { "english_department_id": { "$oid": "67857bc2f317ca21359981d5" }, "math_department_id": { "$oid": "67857be3f317ca21359981d6" } } }
diff --git a/flake.lock b/flake.lock
index e3d798a2..6173d578 100644
--- a/flake.lock
+++ b/flake.lock
@@ -132,11 +132,11 @@
     "graphql-engine-source": {
       "flake": false,
       "locked": {
-        "lastModified": 1733318858,
-        "narHash": "sha256-7/nTrhvRvKnHnDwBxLPpAfwHg06qLyQd3S1iuzQjI5o=",
+        "lastModified": 1736343392,
+        "narHash": "sha256-qv7MPD9NhZE1q7yFbGuqkoRF1igV0hCfn16DzhgZSUs=",
         "owner": "hasura",
         "repo": "graphql-engine",
-        "rev": "8b7ad6684f30266326c49208b8c36251b984bb18",
+        "rev": "48910e25ef253f033b80b487381f0e94e5f1ea27",
         "type": "github"
       },
       "original": {