diff --git a/Cargo.lock b/Cargo.lock index e05e8d17..d4ce9980 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -218,24 +218,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "axum-test-helper" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298f62fa902c2515c169ab0bfb56c593229f33faa01131215d58e3d4898e3aa9" -dependencies = [ - "axum", - "bytes", - "http 0.2.9", - "http-body 0.4.5", - "hyper 0.14.27", - "reqwest 0.11.27", - "serde", - "tokio", - "tower", - "tower-service", -] - [[package]] name = "backtrace" version = "0.3.69" @@ -440,6 +422,7 @@ dependencies = [ "mongodb", "mongodb-support", "ndc-models", + "ndc-query-plan", "schemars", "serde", "serde_json", @@ -605,48 +588,6 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" -[[package]] -name = "dc-api" -version = "0.1.0" -dependencies = [ - "axum", - "axum-test-helper", - "bytes", - "dc-api-types", - "http 0.2.9", - "jsonwebtoken", - "mime", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", -] - -[[package]] -name = "dc-api-test-helpers" -version = "0.1.0" -dependencies = [ - "dc-api-types", - "itertools 0.12.1", -] - -[[package]] -name = "dc-api-types" -version = "0.1.0" -dependencies = [ - "anyhow", - "itertools 0.12.1", - "mongodb", - "nonempty", - "once_cell", - "pretty_assertions", - "regex", - "serde", - "serde_json", - "serde_with 3.7.0", -] - [[package]] name = "deranged" version = "0.3.9" @@ -1419,20 +1360,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jsonwebtoken" -version = "8.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" -dependencies = [ - "base64 0.21.5", - "pem", - "ring 0.16.20", - "serde", - "serde_json", - "simple_asn1", -] - [[package]] name = "lazy_static" version = "1.4.0" @@ -1651,20 
+1578,21 @@ dependencies = [ "axum", "bytes", "configuration", - "dc-api", - "dc-api-test-helpers", - "dc-api-types", "enum-iterator", "futures", "futures-util", "http 0.2.9", "indent", - "indexmap 1.9.3", + "indexmap 2.2.5", "itertools 0.12.1", + "lazy_static", "mockall", "mongodb", "mongodb-cli-plugin", "mongodb-support", + "ndc-models", + "ndc-query-plan", + "ndc-test-helpers", "once_cell", "pretty_assertions", "proptest", @@ -1688,7 +1616,7 @@ dependencies = [ "clap", "configuration", "futures-util", - "indexmap 1.9.3", + "indexmap 2.2.5", "itertools 0.12.1", "mongodb", "mongodb-agent-common", @@ -1708,18 +1636,15 @@ dependencies = [ "anyhow", "async-trait", "configuration", - "dc-api", - "dc-api-test-helpers", - "dc-api-types", "enum-iterator", "futures", "http 0.2.9", "indexmap 2.2.5", "itertools 0.12.1", - "lazy_static", "mongodb", "mongodb-agent-common", "mongodb-support", + "ndc-query-plan", "ndc-sdk", "ndc-test-helpers", "pretty_assertions", @@ -1736,9 +1661,8 @@ name = "mongodb-support" version = "0.1.0" dependencies = [ "anyhow", - "dc-api-types", "enum-iterator", - "indexmap 1.9.3", + "indexmap 2.2.5", "mongodb", "schemars", "serde", @@ -1776,6 +1700,24 @@ dependencies = [ "serde_with 2.3.3", ] +[[package]] +name = "ndc-query-plan" +version = "0.1.0" +dependencies = [ + "anyhow", + "derivative", + "enum-iterator", + "indexmap 2.2.5", + "itertools 0.12.1", + "lazy_static", + "ndc-models", + "ndc-test-helpers", + "nonempty", + "pretty_assertions", + "serde_json", + "thiserror", +] + [[package]] name = "ndc-sdk" version = "0.1.0" @@ -1841,12 +1783,9 @@ dependencies = [ [[package]] name = "nonempty" -version = "0.8.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aeaf4ad7403de93e699c191202f017118df734d3850b01e13a3a8b2e6953d3c9" -dependencies = [ - "serde", -] +checksum = "303e8749c804ccd6ca3b428de7fe0d86cb86bc7606bc15291f100fd487960bb8" [[package]] name = "nu-ansi-term" @@ -1858,26 +1797,6 @@ dependencies 
= [ "winapi", ] -[[package]] -name = "num-bigint" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.17" @@ -2114,15 +2033,6 @@ dependencies = [ "digest", ] -[[package]] -name = "pem" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" -dependencies = [ - "base64 0.13.1", -] - [[package]] name = "percent-encoding" version = "2.3.1" @@ -2449,12 +2359,10 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", - "tokio-util", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-streams", "web-sys", "winreg 0.50.0", ] @@ -3017,18 +2925,6 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" -[[package]] -name = "simple_asn1" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" -dependencies = [ - "num-bigint", - "num-traits", - "thiserror", - "time", -] - [[package]] name = "slab" version = "0.4.9" @@ -3202,6 +3098,8 @@ dependencies = [ "enum-iterator", "mongodb", "mongodb-support", + "ndc-models", + "ndc-test-helpers", "proptest", ] @@ -3834,19 +3732,6 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" -[[package]] -name = 
"wasm-streams" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - [[package]] name = "web-sys" version = "0.3.64" diff --git a/Cargo.toml b/Cargo.toml index 6ad3537b..bb51c4ff 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,13 +5,11 @@ version = "0.0.6" members = [ "crates/cli", "crates/configuration", - "crates/dc-api", - "crates/dc-api-test-helpers", - "crates/dc-api-types", "crates/integration-tests", "crates/mongodb-agent-common", "crates/mongodb-connector", "crates/mongodb-support", + "crates/ndc-query-plan", "crates/ndc-test-helpers", "crates/test-helpers", ] @@ -23,8 +21,10 @@ resolver = "2" ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs.git" } ndc-models = { git = "http://github.com/hasura/ndc-spec.git", tag = "v0.1.2" } +indexmap = { version = "2", features = ["serde"] } # should match the version that ndc-models uses itertools = "^0.12.1" mongodb = { version = "2.8", features = ["tracing-unstable"] } +schemars = "^0.8.12" # Connecting to MongoDB Atlas database with time series collections fails in the # latest released version of the MongoDB Rust driver. 
A fix has been merged, but diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index bba31456..fb59274f 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -12,7 +12,7 @@ mongodb-support = { path = "../mongodb-support" } anyhow = "1.0.80" clap = { version = "4.5.1", features = ["derive", "env"] } futures-util = "0.3.28" -indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses +indexmap = { workspace = true } itertools = { workspace = true } serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0.113", features = ["raw_value"] } diff --git a/crates/configuration/Cargo.toml b/crates/configuration/Cargo.toml index 0bb952f2..772aa473 100644 --- a/crates/configuration/Cargo.toml +++ b/crates/configuration/Cargo.toml @@ -4,13 +4,15 @@ version = "0.1.0" edition = "2021" [dependencies] +mongodb-support = { path = "../mongodb-support" } +ndc-query-plan = { path = "../ndc-query-plan" } + anyhow = "1" futures = "^0.3" itertools = { workspace = true } mongodb = { workspace = true } -mongodb-support = { path = "../mongodb-support" } ndc-models = { workspace = true } -schemars = "^0.8.12" +schemars = { workspace = true } serde = { version = "1", features = ["derive"] } serde_json = { version = "1" } serde_yaml = "^0.9" diff --git a/crates/configuration/src/configuration.rs b/crates/configuration/src/configuration.rs index 04eecab6..8c645515 100644 --- a/crates/configuration/src/configuration.rs +++ b/crates/configuration/src/configuration.rs @@ -28,11 +28,11 @@ pub struct Configuration { /// response. pub functions: BTreeMap, - /// Mutations are based on native mutations. - pub mutations: BTreeMap, + /// Procedures are based on native mutations. + pub procedures: BTreeMap, - /// Native murations allow arbitrary MongoDB commands where types of results are - /// specified via user configuration. 
+ /// Native mutations allow arbitrary MongoDB commands where types of results are specified via + /// user configuration. pub native_mutations: BTreeMap, /// Native queries allow arbitrary aggregation pipelines that can be included in a query plan. @@ -45,7 +45,7 @@ pub struct Configuration { /// The object types here combine object type defined in files in the `schema/`, /// `native_queries/`, and `native_mutations/` subdirectories in the connector configuration /// directory. - pub object_types: BTreeMap, + pub object_types: BTreeMap, pub options: ConfigurationOptions, } @@ -55,7 +55,7 @@ impl Configuration { schema: serialized::Schema, native_mutations: BTreeMap, native_queries: BTreeMap, - options: ConfigurationOptions + options: ConfigurationOptions, ) -> anyhow::Result { let object_types_iter = || merge_object_types(&schema, &native_mutations, &native_queries); let object_type_errors = { @@ -76,16 +76,6 @@ impl Configuration { .map(|(name, ot)| (name.to_owned(), ot.clone())) .collect(); - let internal_native_queries: BTreeMap<_, _> = native_queries - .into_iter() - .map(|(name, nq)| (name, nq.into())) - .collect(); - - let internal_native_mutations: BTreeMap<_, _> = native_mutations - .into_iter() - .map(|(name, np)| (name, np.into())) - .collect(); - let collections = { let regular_collections = schema.collections.into_iter().map(|(name, collection)| { ( @@ -93,8 +83,8 @@ impl Configuration { collection_to_collection_info(&object_types, name, collection), ) }); - let native_query_collections = internal_native_queries.iter().filter_map( - |(name, native_query): (&String, &NativeQuery)| { + let native_query_collections = native_queries.iter().filter_map( + |(name, native_query): (&String, &serialized::NativeQuery)| { if native_query.representation == NativeQueryRepresentation::Collection { Some(( name.to_owned(), @@ -110,7 +100,7 @@ impl Configuration { .collect() }; - let (functions, function_errors): (BTreeMap<_, _>, Vec<_>) = internal_native_queries + let 
(functions, function_errors): (BTreeMap<_, _>, Vec<_>) = native_queries .iter() .filter_map(|(name, native_query)| { if native_query.representation == NativeQueryRepresentation::Function { @@ -129,16 +119,39 @@ impl Configuration { }) .partition_result(); - let mutations = internal_native_mutations + let procedures = native_mutations .iter() .map(|(name, native_mutation)| { ( name.to_owned(), - native_mutation_to_mutation_info(name, native_mutation), + native_mutation_to_procedure_info(name, native_mutation), ) }) .collect(); + let ndc_object_types = object_types + .into_iter() + .map(|(name, ot)| (name, ot.into())) + .collect(); + + let internal_native_queries: BTreeMap<_, _> = native_queries + .into_iter() + .map(|(name, nq)| { + Ok((name, NativeQuery::from_serialized(&ndc_object_types, nq)?)) + as Result<_, anyhow::Error> + }) + .try_collect()?; + + let internal_native_mutations: BTreeMap<_, _> = native_mutations + .into_iter() + .map(|(name, np)| { + Ok(( + name, + NativeMutation::from_serialized(&ndc_object_types, np)?, + )) as Result<_, anyhow::Error> + }) + .try_collect()?; + let errors: Vec = object_type_errors .into_iter() .chain(function_errors) @@ -153,16 +166,21 @@ impl Configuration { Ok(Configuration { collections, functions, - mutations, + procedures, native_mutations: internal_native_mutations, native_queries: internal_native_queries, - object_types, - options + object_types: ndc_object_types, + options, }) } pub fn from_schema(schema: serialized::Schema) -> anyhow::Result { - Self::validate(schema, Default::default(), Default::default(), Default::default()) + Self::validate( + schema, + Default::default(), + Default::default(), + Default::default(), + ) } pub async fn parse_configuration( @@ -240,7 +258,7 @@ fn collection_to_collection_info( fn native_query_to_collection_info( object_types: &BTreeMap, name: &str, - native_query: &NativeQuery, + native_query: &serialized::NativeQuery, ) -> ndc::CollectionInfo { let pk_constraint = 
get_primary_key_uniqueness_constraint( object_types, @@ -282,7 +300,7 @@ fn get_primary_key_uniqueness_constraint( fn native_query_to_function_info( object_types: &BTreeMap, name: &str, - native_query: &NativeQuery, + native_query: &serialized::NativeQuery, ) -> anyhow::Result { Ok(ndc::FunctionInfo { name: name.to_owned(), @@ -305,9 +323,9 @@ fn function_result_type( Ok(value_field.r#type.clone().into()) } -fn native_mutation_to_mutation_info( +fn native_mutation_to_procedure_info( mutation_name: &str, - mutation: &NativeMutation, + mutation: &serialized::NativeMutation, ) -> ndc::ProcedureInfo { ndc::ProcedureInfo { name: mutation_name.to_owned(), @@ -385,7 +403,12 @@ mod tests { )] .into_iter() .collect(); - let result = Configuration::validate(schema, native_mutations, Default::default(), Default::default()); + let result = Configuration::validate( + schema, + native_mutations, + Default::default(), + Default::default(), + ); let error_msg = result.unwrap_err().to_string(); assert!(error_msg.contains("multiple definitions")); assert!(error_msg.contains("Album")); diff --git a/crates/configuration/src/directory.rs b/crates/configuration/src/directory.rs index 75f5e30b..a67e2c24 100644 --- a/crates/configuration/src/directory.rs +++ b/crates/configuration/src/directory.rs @@ -3,12 +3,16 @@ use futures::stream::TryStreamExt as _; use itertools::Itertools as _; use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeMap, HashSet}, fs::Metadata, path::{Path, PathBuf} + collections::{BTreeMap, HashSet}, + fs::Metadata, + path::{Path, PathBuf}, }; use tokio::{fs, io::AsyncWriteExt}; use tokio_stream::wrappers::ReadDirStream; -use crate::{configuration::ConfigurationOptions, serialized::Schema, with_name::WithName, Configuration}; +use crate::{ + configuration::ConfigurationOptions, serialized::Schema, with_name::WithName, Configuration, +}; pub const SCHEMA_DIRNAME: &str = "schema"; pub const NATIVE_MUTATIONS_DIRNAME: &str = "native_mutations"; @@ -59,8 
+63,7 @@ pub async fn read_directory( .await? .unwrap_or_default(); - let options = parse_configuration_options_file(dir) - .await; + let options = parse_configuration_options_file(dir).await; native_mutations.extend(native_procedures.into_iter()); @@ -129,13 +132,13 @@ pub async fn parse_configuration_options_file(dir: &Path) -> ConfigurationOption let json_filename = CONFIGURATION_OPTIONS_BASENAME.to_owned() + ".json"; let json_config_file = parse_config_file(&dir.join(json_filename), JSON).await; if let Ok(config_options) = json_config_file { - return config_options + return config_options; } let yaml_filename = CONFIGURATION_OPTIONS_BASENAME.to_owned() + ".yaml"; let yaml_config_file = parse_config_file(&dir.join(yaml_filename), YAML).await; if let Ok(config_options) = yaml_config_file { - return config_options + return config_options; } // If a configuration file does not exist use defaults and write the file @@ -205,7 +208,7 @@ where // Don't write the file if it hasn't changed. if let Ok(existing_bytes) = fs::read(&path).await { if bytes == existing_bytes { - return Ok(()) + return Ok(()); } } fs::write(&path, bytes) @@ -228,9 +231,7 @@ pub async fn list_existing_schemas( // Metadata file is just a dot filed used for the purposes of know if the user has updated their config to force refresh // of the schema introspection. 
-async fn write_config_metadata_file( - configuration_dir: impl AsRef -) { +async fn write_config_metadata_file(configuration_dir: impl AsRef) { let dir = configuration_dir.as_ref(); let file_result = fs::OpenOptions::new() .write(true) @@ -244,26 +245,20 @@ async fn write_config_metadata_file( }; } -pub async fn get_config_file_changed( - dir: impl AsRef -) -> anyhow::Result { +pub async fn get_config_file_changed(dir: impl AsRef) -> anyhow::Result { let path = dir.as_ref(); - let dot_metadata: Result = fs::metadata( - &path.join(CONFIGURATION_OPTIONS_METADATA) - ).await; - let json_metadata = fs::metadata( - &path.join(CONFIGURATION_OPTIONS_BASENAME.to_owned() + ".json") - ).await; - let yaml_metadata = fs::metadata( - &path.join(CONFIGURATION_OPTIONS_BASENAME.to_owned() + ".yaml") - ).await; + let dot_metadata: Result = + fs::metadata(&path.join(CONFIGURATION_OPTIONS_METADATA)).await; + let json_metadata = + fs::metadata(&path.join(CONFIGURATION_OPTIONS_BASENAME.to_owned() + ".json")).await; + let yaml_metadata = + fs::metadata(&path.join(CONFIGURATION_OPTIONS_BASENAME.to_owned() + ".yaml")).await; let compare = |dot_date, config_date| async move { if dot_date < config_date { let _ = write_config_metadata_file(path).await; Ok(true) - } - else { + } else { Ok(false) } }; @@ -271,6 +266,6 @@ pub async fn get_config_file_changed( match (dot_metadata, json_metadata, yaml_metadata) { (Ok(dot), Ok(json), _) => compare(dot.modified()?, json.modified()?).await, (Ok(dot), _, Ok(yaml)) => compare(dot.modified()?, yaml.modified()?).await, - _ => Ok(true) + _ => Ok(true), } } diff --git a/crates/configuration/src/lib.rs b/crates/configuration/src/lib.rs index d7ce160f..c9c2f971 100644 --- a/crates/configuration/src/lib.rs +++ b/crates/configuration/src/lib.rs @@ -1,16 +1,18 @@ mod configuration; mod directory; +mod mongo_scalar_type; pub mod native_mutation; pub mod native_query; pub mod schema; -mod serialized; +pub mod serialized; mod with_name; pub use 
crate::configuration::Configuration; +pub use crate::directory::get_config_file_changed; pub use crate::directory::list_existing_schemas; +pub use crate::directory::parse_configuration_options_file; pub use crate::directory::read_directory; pub use crate::directory::write_schema_directory; -pub use crate::directory::parse_configuration_options_file; -pub use crate::directory::get_config_file_changed; +pub use crate::mongo_scalar_type::MongoScalarType; pub use crate::serialized::Schema; pub use crate::with_name::{WithName, WithNameRef}; diff --git a/crates/configuration/src/mongo_scalar_type.rs b/crates/configuration/src/mongo_scalar_type.rs new file mode 100644 index 00000000..9eb606f6 --- /dev/null +++ b/crates/configuration/src/mongo_scalar_type.rs @@ -0,0 +1,35 @@ +use mongodb_support::{BsonScalarType, EXTENDED_JSON_TYPE_NAME}; +use ndc_query_plan::QueryPlanError; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MongoScalarType { + /// One of the predefined BSON scalar types + Bson(BsonScalarType), + + /// Any BSON value, represented as Extended JSON. + /// To be used when we don't have any more information + /// about the types of values that a column, field or argument can take. + /// Also used when unifying two incompatible types in schemas derived
+ ExtendedJSON, +} + +impl MongoScalarType { + pub fn lookup_scalar_type(name: &str) -> Option { + Self::try_from(name).ok() + } +} + +impl TryFrom<&str> for MongoScalarType { + type Error = QueryPlanError; + + fn try_from(name: &str) -> Result { + if name == EXTENDED_JSON_TYPE_NAME { + Ok(MongoScalarType::ExtendedJSON) + } else { + let t = BsonScalarType::from_bson_name(name) + .map_err(|_| QueryPlanError::UnknownScalarType(name.to_owned()))?; + Ok(MongoScalarType::Bson(t)) + } + } +} diff --git a/crates/configuration/src/native_mutation.rs b/crates/configuration/src/native_mutation.rs index 74efeb0e..c49b5241 100644 --- a/crates/configuration/src/native_mutation.rs +++ b/crates/configuration/src/native_mutation.rs @@ -1,11 +1,12 @@ use std::collections::BTreeMap; +use itertools::Itertools as _; use mongodb::{bson, options::SelectionCriteria}; +use ndc_models as ndc; +use ndc_query_plan as plan; +use plan::{inline_object_types, QueryPlanError}; -use crate::{ - schema::{ObjectField, Type}, - serialized::{self}, -}; +use crate::{serialized, MongoScalarType}; /// Internal representation of Native Mutations. For doc comments see /// [crate::serialized::NativeMutation] @@ -15,21 +16,45 @@ use crate::{ /// Native query values are stored in maps so names should be taken from map keys. 
#[derive(Clone, Debug)] pub struct NativeMutation { - pub result_type: Type, - pub arguments: BTreeMap, + pub result_type: plan::Type, + pub arguments: BTreeMap>, pub command: bson::Document, pub selection_criteria: Option, pub description: Option, } -impl From for NativeMutation { - fn from(value: serialized::NativeMutation) -> Self { - NativeMutation { - result_type: value.result_type, - arguments: value.arguments, - command: value.command, - selection_criteria: value.selection_criteria, - description: value.description, - } +impl NativeMutation { + pub fn from_serialized( + object_types: &BTreeMap, + input: serialized::NativeMutation, + ) -> Result { + let arguments = input + .arguments + .into_iter() + .map(|(name, object_field)| { + Ok(( + name, + inline_object_types( + object_types, + &object_field.r#type.into(), + MongoScalarType::lookup_scalar_type, + )?, + )) + }) + .try_collect()?; + + let result_type = inline_object_types( + object_types, + &input.result_type.into(), + MongoScalarType::lookup_scalar_type, + )?; + + Ok(NativeMutation { + result_type, + arguments, + command: input.command, + selection_criteria: input.selection_criteria, + description: input.description, + }) } } diff --git a/crates/configuration/src/native_query.rs b/crates/configuration/src/native_query.rs index 00e85169..731b3f69 100644 --- a/crates/configuration/src/native_query.rs +++ b/crates/configuration/src/native_query.rs @@ -1,10 +1,14 @@ use std::collections::BTreeMap; +use itertools::Itertools as _; use mongodb::bson; +use ndc_models as ndc; +use ndc_query_plan as plan; +use plan::{inline_object_types, QueryPlanError}; use schemars::JsonSchema; use serde::Deserialize; -use crate::{schema::ObjectField, serialized}; +use crate::{serialized, MongoScalarType}; /// Internal representation of Native Queries. 
For doc comments see /// [crate::serialized::NativeQuery] @@ -16,22 +20,40 @@ use crate::{schema::ObjectField, serialized}; pub struct NativeQuery { pub representation: NativeQueryRepresentation, pub input_collection: Option, - pub arguments: BTreeMap, + pub arguments: BTreeMap>, pub result_document_type: String, pub pipeline: Vec, pub description: Option, } -impl From for NativeQuery { - fn from(value: serialized::NativeQuery) -> Self { - NativeQuery { - representation: value.representation, - input_collection: value.input_collection, - arguments: value.arguments, - result_document_type: value.result_document_type, - pipeline: value.pipeline, - description: value.description, - } +impl NativeQuery { + pub fn from_serialized( + object_types: &BTreeMap, + input: serialized::NativeQuery, + ) -> Result { + let arguments = input + .arguments + .into_iter() + .map(|(name, object_field)| { + Ok(( + name, + inline_object_types( + object_types, + &object_field.r#type.into(), + MongoScalarType::lookup_scalar_type, + )?, + )) + }) + .try_collect()?; + + Ok(NativeQuery { + representation: input.representation, + input_collection: input.input_collection, + arguments, + result_document_type: input.result_document_type, + pipeline: input.pipeline, + description: input.description, + }) } } diff --git a/crates/configuration/src/schema/mod.rs b/crates/configuration/src/schema/mod.rs index 4b7418ad..f6524770 100644 --- a/crates/configuration/src/schema/mod.rs +++ b/crates/configuration/src/schema/mod.rs @@ -37,13 +37,6 @@ pub enum Type { } impl Type { - pub fn is_nullable(&self) -> bool { - matches!( - self, - Type::ExtendedJSON | Type::Nullable(_) | Type::Scalar(BsonScalarType::Null) - ) - } - pub fn normalize_type(self) -> Type { match self { Type::ExtendedJSON => Type::ExtendedJSON, @@ -80,7 +73,7 @@ impl From for ndc_models::Type { }), }, Type::Scalar(t) => ndc_models::Type::Named { - name: t.graphql_name(), + name: t.graphql_name().to_owned(), }, Type::Object(t) => 
ndc_models::Type::Named { name: t.clone() }, Type::ArrayOf(t) => ndc_models::Type::Array { diff --git a/crates/configuration/src/serialized/native_mutation.rs b/crates/configuration/src/serialized/native_mutation.rs index 4f0cec31..9bc6c5d2 100644 --- a/crates/configuration/src/serialized/native_mutation.rs +++ b/crates/configuration/src/serialized/native_mutation.rs @@ -9,7 +9,7 @@ use crate::schema::{ObjectField, ObjectType, Type}; /// An arbitrary database command using MongoDB's runCommand API. /// See https://www.mongodb.com/docs/manual/reference/method/db.runCommand/ /// -/// Native Mutations appear as "mutations" in your data graph. +/// Native Procedures appear as "procedures" in your data graph. #[derive(Clone, Debug, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct NativeMutation { diff --git a/crates/dc-api-test-helpers/Cargo.toml b/crates/dc-api-test-helpers/Cargo.toml deleted file mode 100644 index 2165ebe7..00000000 --- a/crates/dc-api-test-helpers/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "dc-api-test-helpers" -version = "0.1.0" -edition = "2021" - -[dependencies] -dc-api-types = { path = "../dc-api-types" } -itertools = { workspace = true } diff --git a/crates/dc-api-test-helpers/src/aggregates.rs b/crates/dc-api-test-helpers/src/aggregates.rs deleted file mode 100644 index f880ea61..00000000 --- a/crates/dc-api-test-helpers/src/aggregates.rs +++ /dev/null @@ -1,36 +0,0 @@ -#[macro_export()] -macro_rules! column_aggregate { - ($name:literal => $column:literal, $function:literal : $typ:literal) => { - ( - $name.to_owned(), - dc_api_types::Aggregate::SingleColumn { - column: $column.to_owned(), - function: $function.to_owned(), - result_type: $typ.to_owned(), - }, - ) - }; -} - -#[macro_export()] -macro_rules! star_count_aggregate { - ($name:literal) => { - ( - $name.to_owned(), - dc_api_types::Aggregate::StarCount {}, - ) - }; -} - -#[macro_export()] -macro_rules! 
column_count_aggregate { - ($name:literal => $column:literal, distinct:$distinct:literal) => { - ( - $name.to_owned(), - dc_api_types::Aggregate::ColumnCount { - column: $column.to_owned(), - distinct: $distinct.to_owned(), - }, - ) - }; -} diff --git a/crates/dc-api-test-helpers/src/column_selector.rs b/crates/dc-api-test-helpers/src/column_selector.rs deleted file mode 100644 index 6c91764e..00000000 --- a/crates/dc-api-test-helpers/src/column_selector.rs +++ /dev/null @@ -1,17 +0,0 @@ -#[macro_export] -macro_rules! select { - ($name:literal) => { - dc_api_types::ColumnSelector::Column($name.to_owned()) - }; -} - -#[macro_export] -macro_rules! select_qualified { - ([$($path_element:literal $(,)?)+]) => { - dc_api_types::ColumnSelector::Path( - nonempty::nonempty![ - $($path_element.to_owned(),)+ - ] - ) - }; -} diff --git a/crates/dc-api-test-helpers/src/comparison_column.rs b/crates/dc-api-test-helpers/src/comparison_column.rs deleted file mode 100644 index c8a549af..00000000 --- a/crates/dc-api-test-helpers/src/comparison_column.rs +++ /dev/null @@ -1,28 +0,0 @@ -#[macro_export] -macro_rules! compare { - ($name:literal: $typ:literal) => { - dc_api_types::ComparisonColumn { - column_type: $typ.to_owned(), - name: dc_api_types::ColumnSelector::Column($name.to_owned()), - path: None, - } - }; - ($path:expr, $name:literal: $typ:literal) => { - dc_api_types::ComparisonColumn { - column_type: $typ.to_owned(), - name: dc_api_types::ColumnSelector::Column($name.to_owned()), - path: Some($path.into_iter().map(|v| v.to_string()).collect()), - } - }; -} - -#[macro_export] -macro_rules! 
compare_with_path { - ($path:expr, $name:literal: $typ:literal) => { - dc_api_types::ComparisonColumn { - column_type: $typ.to_owned(), - name: dc_api_types::ColumnSelector::Column($name.to_owned()), - path: Some($path.into_iter().map(|v| v.to_string()).collect()), - } - }; -} diff --git a/crates/dc-api-test-helpers/src/comparison_value.rs b/crates/dc-api-test-helpers/src/comparison_value.rs deleted file mode 100644 index 3e2fe1e4..00000000 --- a/crates/dc-api-test-helpers/src/comparison_value.rs +++ /dev/null @@ -1,18 +0,0 @@ -#[macro_export] -macro_rules! column_value { - ($($col:tt)+) => { - dc_api_types::ComparisonValue::AnotherColumnComparison { - column: $crate::compare!($($col)+), - } - }; -} - -#[macro_export] -macro_rules! value { - ($value:expr, $typ:literal) => { - dc_api_types::ComparisonValue::ScalarValueComparison { - value: $value, - value_type: $typ.to_owned(), - } - }; -} diff --git a/crates/dc-api-test-helpers/src/expression.rs b/crates/dc-api-test-helpers/src/expression.rs deleted file mode 100644 index 49917c11..00000000 --- a/crates/dc-api-test-helpers/src/expression.rs +++ /dev/null @@ -1,80 +0,0 @@ -use dc_api_types::{ - ArrayComparisonValue, BinaryArrayComparisonOperator, BinaryComparisonOperator, - ComparisonColumn, ComparisonValue, ExistsInTable, Expression, -}; - -pub fn and(operands: I) -> Expression -where - I: IntoIterator, -{ - Expression::And { - expressions: operands.into_iter().collect(), - } -} - -pub fn or(operands: I) -> Expression -where - I: IntoIterator, -{ - Expression::Or { - expressions: operands.into_iter().collect(), - } -} - -pub fn not(operand: Expression) -> Expression { - Expression::Not { - expression: Box::new(operand), - } -} - -pub fn equal(op1: ComparisonColumn, op2: ComparisonValue) -> Expression { - Expression::ApplyBinaryComparison { - column: op1, - operator: BinaryComparisonOperator::Equal, - value: op2, - } -} - -pub fn binop(oper: S, op1: ComparisonColumn, op2: ComparisonValue) -> Expression -where - S: 
ToString, -{ - Expression::ApplyBinaryComparison { - column: op1, - operator: BinaryComparisonOperator::CustomBinaryComparisonOperator(oper.to_string()), - value: op2, - } -} - -pub fn is_in(op1: ComparisonColumn, value_type: &str, values: I) -> Expression -where - I: IntoIterator, -{ - Expression::ApplyBinaryArrayComparison { - column: op1, - operator: BinaryArrayComparisonOperator::In, - value_type: value_type.to_owned(), - values: values.into_iter().collect(), - } -} - -pub fn exists(relationship: &str, predicate: Expression) -> Expression { - Expression::Exists { - in_table: ExistsInTable::RelatedTable { - relationship: relationship.to_owned(), - }, - r#where: Box::new(predicate), - } -} - -pub fn exists_unrelated( - table: impl IntoIterator, - predicate: Expression, -) -> Expression { - Expression::Exists { - in_table: ExistsInTable::UnrelatedTable { - table: table.into_iter().map(|v| v.to_string()).collect(), - }, - r#where: Box::new(predicate), - } -} diff --git a/crates/dc-api-test-helpers/src/field.rs b/crates/dc-api-test-helpers/src/field.rs deleted file mode 100644 index 548bc099..00000000 --- a/crates/dc-api-test-helpers/src/field.rs +++ /dev/null @@ -1,76 +0,0 @@ -#[macro_export()] -macro_rules! column { - ($name:literal : $typ:literal) => { - ( - $name.to_owned(), - dc_api_types::Field::Column { - column: $name.to_owned(), - column_type: $typ.to_owned(), - }, - ) - }; - ($name:literal => $column:literal : $typ:literal) => { - ( - $name.to_owned(), - dc_api_types::Field::Column { - column: $column.to_owned(), - column_type: $typ.to_owned(), - }, - ) - }; -} - -#[macro_export] -macro_rules! relation_field { - ($relationship:literal => $name:literal, $query:expr) => { - ( - $name.into(), - dc_api_types::Field::Relationship { - relationship: $relationship.to_owned(), - query: Box::new($query.into()), - }, - ) - }; -} - -#[macro_export()] -macro_rules! 
nested_object_field { - ($column:literal, $query:expr) => { - dc_api_types::Field::NestedObject { - column: $column.to_owned(), - query: Box::new($query.into()), - } - }; -} - -#[macro_export()] -macro_rules! nested_object { - ($name:literal => $column:literal, $query:expr) => { - ( - $name.to_owned(), - dc_api_test_helpers::nested_object_field!($column, $query), - ) - }; -} - -#[macro_export()] -macro_rules! nested_array_field { - ($field:expr) => { - dc_api_types::Field::NestedArray { - field: Box::new($field), - limit: None, - offset: None, - r#where: None, - } - }; -} - -#[macro_export()] -macro_rules! nested_array { - ($name:literal, $field:expr) => { - ( - $name.to_owned(), - dc_api_test_helpers::nested_array_field!($field), - ) - }; -} diff --git a/crates/dc-api-test-helpers/src/lib.rs b/crates/dc-api-test-helpers/src/lib.rs deleted file mode 100644 index e00cd7b6..00000000 --- a/crates/dc-api-test-helpers/src/lib.rs +++ /dev/null @@ -1,106 +0,0 @@ -//! Defining a DSL using builders cuts out SO MUCH noise from test cases -#![allow(unused_imports)] - -mod aggregates; -mod column_selector; -mod comparison_column; -mod comparison_value; -mod expression; -mod field; -mod query; -mod query_request; - -use dc_api_types::{ - ColumnMapping, ColumnSelector, Relationship, RelationshipType, TableRelationships, Target, -}; - -pub use column_selector::*; -pub use comparison_column::*; -pub use comparison_value::*; -pub use expression::*; -pub use field::*; -pub use query::*; -pub use query_request::*; - -#[derive(Clone, Debug)] -pub struct RelationshipBuilder { - pub column_mapping: ColumnMapping, - pub relationship_type: RelationshipType, - pub target: Target, -} - -pub fn relationship( - target: Target, - column_mapping: [(ColumnSelector, ColumnSelector); S], -) -> RelationshipBuilder { - RelationshipBuilder::new(target, column_mapping) -} - -impl RelationshipBuilder { - pub fn new( - target: Target, - column_mapping: [(ColumnSelector, ColumnSelector); S], - ) -> Self 
{ - RelationshipBuilder { - column_mapping: ColumnMapping(column_mapping.into_iter().collect()), - relationship_type: RelationshipType::Array, - target, - } - } - - pub fn relationship_type(mut self, relationship_type: RelationshipType) -> Self { - self.relationship_type = relationship_type; - self - } - - pub fn object_type(mut self) -> Self { - self.relationship_type = RelationshipType::Object; - self - } -} - -impl From for Relationship { - fn from(value: RelationshipBuilder) -> Self { - Relationship { - column_mapping: value.column_mapping, - relationship_type: value.relationship_type, - target: value.target, - } - } -} - -pub fn source(name: &str) -> Vec { - vec![name.to_owned()] -} - -pub fn target(name: &str) -> Target { - Target::TTable { - name: vec![name.to_owned()], - arguments: Default::default(), - } -} - -#[allow(dead_code)] -pub fn selector_path(path_elements: [&str; S]) -> ColumnSelector { - ColumnSelector::Path( - path_elements - .into_iter() - .map(|e| e.to_owned()) - .collect::>() - .try_into() - .expect("column selector path cannot be empty"), - ) -} - -pub fn table_relationships( - source_table: Vec, - relationships: [(&str, impl Into); S], -) -> TableRelationships { - TableRelationships { - relationships: relationships - .into_iter() - .map(|(name, r)| (name.to_owned(), r.into())) - .collect(), - source_table, - } -} diff --git a/crates/dc-api-test-helpers/src/query.rs b/crates/dc-api-test-helpers/src/query.rs deleted file mode 100644 index 4d73dccd..00000000 --- a/crates/dc-api-test-helpers/src/query.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::collections::HashMap; - -use dc_api_types::{Aggregate, Expression, Field, OrderBy, Query}; - -#[derive(Clone, Debug, Default)] -pub struct QueryBuilder { - aggregates: Option>, - aggregates_limit: Option, - fields: Option>, - limit: Option, - offset: Option, - order_by: Option, - predicate: Option, -} - -pub fn query() -> QueryBuilder { - Default::default() -} - -impl QueryBuilder { - pub fn fields(mut 
self, fields: I) -> Self - where - I: IntoIterator, - { - self.fields = Some(fields.into_iter().collect()); - self - } - - pub fn aggregates(mut self, aggregates: I) -> Self - where - I: IntoIterator, - { - self.aggregates = Some(aggregates.into_iter().collect()); - self - } - - pub fn predicate(mut self, predicate: Expression) -> Self { - self.predicate = Some(predicate); - self - } - - pub fn order_by(mut self, order_by: OrderBy) -> Self { - self.order_by = Some(order_by); - self - } -} - -impl From for Query { - fn from(builder: QueryBuilder) -> Self { - Query { - aggregates: builder.aggregates, - aggregates_limit: builder.aggregates_limit, - fields: builder.fields, - limit: builder.limit, - offset: builder.offset, - order_by: builder.order_by, - r#where: builder.predicate, - } - } -} diff --git a/crates/dc-api-test-helpers/src/query_request.rs b/crates/dc-api-test-helpers/src/query_request.rs deleted file mode 100644 index 47437e5a..00000000 --- a/crates/dc-api-test-helpers/src/query_request.rs +++ /dev/null @@ -1,76 +0,0 @@ -use std::collections::HashMap; - -use dc_api_types::{ - Argument, Query, QueryRequest, ScalarValue, TableRelationships, Target, VariableSet, -}; - -#[derive(Clone, Debug, Default)] -pub struct QueryRequestBuilder { - foreach: Option>>, - query: Option, - target: Option, - relationships: Option>, - variables: Option>, -} - -pub fn query_request() -> QueryRequestBuilder { - Default::default() -} - -impl QueryRequestBuilder { - pub fn target(mut self, name: I) -> Self - where - I: IntoIterator, - S: ToString, - { - self.target = Some(Target::TTable { - name: name.into_iter().map(|v| v.to_string()).collect(), - arguments: Default::default(), - }); - self - } - - pub fn target_with_arguments(mut self, name: I, arguments: Args) -> Self - where - I: IntoIterator, - S: ToString, - Args: IntoIterator, - { - self.target = Some(Target::TTable { - name: name.into_iter().map(|v| v.to_string()).collect(), - arguments: arguments - .into_iter() - 
.map(|(name, arg)| (name.to_string(), arg)) - .collect(), - }); - self - } - - pub fn query(mut self, query: impl Into) -> Self { - self.query = Some(query.into()); - self - } - - pub fn relationships(mut self, relationships: impl Into>) -> Self { - self.relationships = Some(relationships.into()); - self - } -} - -impl From for QueryRequest { - fn from(builder: QueryRequestBuilder) -> Self { - QueryRequest { - foreach: builder.foreach.map(Some), - query: Box::new( - builder - .query - .expect("cannot build from a QueryRequestBuilder without a query"), - ), - target: builder - .target - .expect("cannot build from a QueryRequestBuilder without a target"), - relationships: builder.relationships.unwrap_or_default(), - variables: builder.variables, - } - } -} diff --git a/crates/dc-api-types/Cargo.toml b/crates/dc-api-types/Cargo.toml deleted file mode 100644 index a2b61b0e..00000000 --- a/crates/dc-api-types/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "dc-api-types" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -itertools = { workspace = true } -nonempty = { version = "0.8.1", features = ["serialize"] } -once_cell = "1" -regex = "1" -serde = { version = "1", features = ["derive"] } -serde_json = { version = "1", features = ["preserve_order"] } -serde_with = "3" - -[dev-dependencies] -anyhow = "1" -mongodb = { workspace = true } -pretty_assertions = "1" diff --git a/crates/dc-api-types/src/aggregate.rs b/crates/dc-api-types/src/aggregate.rs deleted file mode 100644 index 066d72b0..00000000 --- a/crates/dc-api-types/src/aggregate.rs +++ /dev/null @@ -1,51 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, 
Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum Aggregate { - #[serde(rename = "column_count")] - ColumnCount { - /// The column to apply the count aggregate function to - #[serde(rename = "column")] - column: String, - /// Whether or not only distinct items should be counted - #[serde(rename = "distinct")] - distinct: bool, - }, - #[serde(rename = "single_column")] - SingleColumn { - /// The column to apply the aggregation function to - #[serde(rename = "column")] - column: String, - /// Single column aggregate function name. A valid GraphQL name - #[serde(rename = "function")] - function: String, - #[serde(rename = "result_type")] - result_type: String, - }, - #[serde(rename = "star_count")] - StarCount {}, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "star_count")] - StarCount, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::StarCount - } -} diff --git a/crates/dc-api-types/src/and_expression.rs b/crates/dc-api-types/src/and_expression.rs deleted file mode 100644 index df72c32e..00000000 --- a/crates/dc-api-types/src/and_expression.rs +++ /dev/null @@ -1,41 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct AndExpression { - #[serde(rename = "expressions")] - pub expressions: Vec, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl AndExpression { - pub fn new(expressions: Vec, r#type: RHashType) -> AndExpression { - AndExpression { - expressions, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - 
#[serde(rename = "and")] - And, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::And - } -} diff --git a/crates/dc-api-types/src/another_column_comparison.rs b/crates/dc-api-types/src/another_column_comparison.rs deleted file mode 100644 index 370bd5a2..00000000 --- a/crates/dc-api-types/src/another_column_comparison.rs +++ /dev/null @@ -1,41 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct AnotherColumnComparison { - #[serde(rename = "column")] - pub column: Box, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl AnotherColumnComparison { - pub fn new(column: crate::ComparisonColumn, r#type: RHashType) -> AnotherColumnComparison { - AnotherColumnComparison { - column: Box::new(column), - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/apply_binary_array_comparison_operator.rs b/crates/dc-api-types/src/apply_binary_array_comparison_operator.rs deleted file mode 100644 index bfb932e1..00000000 --- a/crates/dc-api-types/src/apply_binary_array_comparison_operator.rs +++ /dev/null @@ -1,101 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ApplyBinaryArrayComparisonOperator { - #[serde(rename = 
"column")] - pub column: crate::ComparisonColumn, - #[serde(rename = "operator")] - pub operator: crate::BinaryArrayComparisonOperator, - #[serde(rename = "type")] - pub r#type: RHashType, - #[serde(rename = "value_type")] - pub value_type: String, - #[serde(rename = "values")] - pub values: Vec, -} - -impl ApplyBinaryArrayComparisonOperator { - pub fn new( - column: crate::ComparisonColumn, - operator: crate::BinaryArrayComparisonOperator, - r#type: RHashType, - value_type: String, - values: Vec, - ) -> ApplyBinaryArrayComparisonOperator { - ApplyBinaryArrayComparisonOperator { - column, - operator, - r#type, - value_type, - values, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "binary_arr_op")] - BinaryArrOp, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::BinaryArrOp - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson}; - - use crate::comparison_column::ColumnSelector; - use crate::BinaryArrayComparisonOperator; - use crate::ComparisonColumn; - - use super::ApplyBinaryArrayComparisonOperator; - use super::RHashType; - - #[test] - fn parses_rhash_type() -> Result<(), anyhow::Error> { - let input = bson!("binary_arr_op"); - assert_eq!(from_bson::(input)?, RHashType::BinaryArrOp); - Ok(()) - } - - #[test] - fn parses_apply_binary_comparison_operator() -> Result<(), anyhow::Error> { - let input = bson!({ - "type": "binary_arr_op", - "column": {"column_type": "string", "name": "title"}, - "operator": "in", - "value_type": "string", - "values": ["One", "Two"] - }); - assert_eq!( - from_bson::(input)?, - ApplyBinaryArrayComparisonOperator { - r#type: RHashType::BinaryArrOp, - column: ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None - }, - operator: BinaryArrayComparisonOperator::In, - value_type: "string".to_owned(), - values: vec!["One".into(), 
"Two".into()] - } - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/apply_binary_comparison_operator.rs b/crates/dc-api-types/src/apply_binary_comparison_operator.rs deleted file mode 100644 index 96eccb5f..00000000 --- a/crates/dc-api-types/src/apply_binary_comparison_operator.rs +++ /dev/null @@ -1,99 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ApplyBinaryComparisonOperator { - #[serde(rename = "column")] - pub column: crate::ComparisonColumn, - #[serde(rename = "operator")] - pub operator: crate::BinaryComparisonOperator, - #[serde(rename = "type")] - pub r#type: RHashType, - #[serde(rename = "value")] - pub value: crate::ComparisonValue, -} - -impl ApplyBinaryComparisonOperator { - pub fn new( - column: crate::ComparisonColumn, - operator: crate::BinaryComparisonOperator, - r#type: RHashType, - value: crate::ComparisonValue, - ) -> ApplyBinaryComparisonOperator { - ApplyBinaryComparisonOperator { - column, - operator, - r#type, - value, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "binary_op")] - BinaryOp, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::BinaryOp - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson}; - - use crate::comparison_column::ColumnSelector; - use crate::BinaryComparisonOperator; - use crate::ComparisonColumn; - use crate::ComparisonValue; - - use super::ApplyBinaryComparisonOperator; - use super::RHashType; - - #[test] - fn parses_rhash_type() -> Result<(), anyhow::Error> { - let input = bson!("binary_op"); - assert_eq!(from_bson::(input)?, RHashType::BinaryOp); - Ok(()) - } - - 
#[test] - fn parses_apply_binary_comparison_operator() -> Result<(), anyhow::Error> { - let input = bson!({ - "type": "binary_op", - "column": {"column_type": "string", "name": "title"}, - "operator": "equal", - "value": {"type": "scalar", "value": "One", "value_type": "string"} - }); - assert_eq!( - from_bson::(input)?, - ApplyBinaryComparisonOperator { - r#type: RHashType::BinaryOp, - column: ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None - }, - operator: BinaryComparisonOperator::Equal, - value: ComparisonValue::ScalarValueComparison { - value: serde_json::json!("One"), - value_type: "string".to_owned() - } - } - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/apply_unary_comparison_operator.rs b/crates/dc-api-types/src/apply_unary_comparison_operator.rs deleted file mode 100644 index 08f6c982..00000000 --- a/crates/dc-api-types/src/apply_unary_comparison_operator.rs +++ /dev/null @@ -1,85 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ApplyUnaryComparisonOperator { - #[serde(rename = "column")] - pub column: crate::ComparisonColumn, - #[serde(rename = "operator")] - pub operator: crate::UnaryComparisonOperator, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl ApplyUnaryComparisonOperator { - pub fn new( - column: crate::ComparisonColumn, - operator: crate::UnaryComparisonOperator, - r#type: RHashType, - ) -> ApplyUnaryComparisonOperator { - ApplyUnaryComparisonOperator { - column, - operator, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "unary_op")] - UnaryOp, 
-} - -impl Default for RHashType { - fn default() -> RHashType { - Self::UnaryOp - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson}; - - use crate::comparison_column::ColumnSelector; - use crate::ComparisonColumn; - use crate::UnaryComparisonOperator; - - use super::ApplyUnaryComparisonOperator; - use super::RHashType; - - #[test] - fn parses_rhash_type() -> Result<(), anyhow::Error> { - let input = bson!("unary_op"); - assert_eq!(from_bson::(input)?, RHashType::UnaryOp); - Ok(()) - } - - #[test] - fn parses_apply_unary_comparison_operator() -> Result<(), anyhow::Error> { - let input = bson!({"column": bson!({"column_type": "foo", "name": "_id"}), "operator": "is_null", "type": "unary_op"}); - assert_eq!( - from_bson::(input)?, - ApplyUnaryComparisonOperator { - column: ComparisonColumn { - column_type: "foo".to_owned(), - name: ColumnSelector::new("_id".to_owned()), - path: None - }, - operator: UnaryComparisonOperator::IsNull, - r#type: RHashType::UnaryOp - } - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/array_comparison_value.rs b/crates/dc-api-types/src/array_comparison_value.rs deleted file mode 100644 index 1417f4c9..00000000 --- a/crates/dc-api-types/src/array_comparison_value.rs +++ /dev/null @@ -1,20 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::ComparisonColumn; - -/// Types for values in the `values` field of `ApplyBinaryArrayComparison`. The v2 DC API -/// interprets all such values as scalars, so we want to parse whatever is given as -/// a serde_json::Value. But the v3 NDC API allows column references or variable references here. -/// So this enum is present to support queries translated from the v3 API. -/// -/// For compatibility with the v2 API the enum is designed so that it will always deserialize to -/// the Scalar variant, and other variants will fail to serialize. 
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum ArrayComparisonValue { - Scalar(serde_json::Value), - #[serde(skip)] - Column(ComparisonColumn), - #[serde(skip)] - Variable(String), -} diff --git a/crates/dc-api-types/src/array_relation_insert_schema.rs b/crates/dc-api-types/src/array_relation_insert_schema.rs deleted file mode 100644 index d56bcebf..00000000 --- a/crates/dc-api-types/src/array_relation_insert_schema.rs +++ /dev/null @@ -1,42 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ArrayRelationInsertSchema { - /// The name of the array relationship over which the related rows must be inserted - #[serde(rename = "relationship")] - pub relationship: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl ArrayRelationInsertSchema { - pub fn new(relationship: String, r#type: RHashType) -> ArrayRelationInsertSchema { - ArrayRelationInsertSchema { - relationship, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "array_relation")] - ArrayRelation, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::ArrayRelation - } -} diff --git a/crates/dc-api-types/src/atomicity_support_level.rs b/crates/dc-api-types/src/atomicity_support_level.rs deleted file mode 100644 index 23ebffc8..00000000 --- a/crates/dc-api-types/src/atomicity_support_level.rs +++ /dev/null @@ -1,43 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: 
https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -/// AtomicitySupportLevel : Describes the level of transactional atomicity the agent supports for mutation operations. 'row': If multiple rows are affected in a single operation but one fails, only the failed row's changes will be reverted 'single_operation': If multiple rows are affected in a single operation but one fails, all affected rows in the operation will be reverted 'homogeneous_operations': If multiple operations of only the same type exist in the one mutation request, a failure in one will result in all changes being reverted 'heterogeneous_operations': If multiple operations of any type exist in the one mutation request, a failure in one will result in all changes being reverted - -/// Describes the level of transactional atomicity the agent supports for mutation operations. 'row': If multiple rows are affected in a single operation but one fails, only the failed row's changes will be reverted 'single_operation': If multiple rows are affected in a single operation but one fails, all affected rows in the operation will be reverted 'homogeneous_operations': If multiple operations of only the same type exist in the one mutation request, a failure in one will result in all changes being reverted 'heterogeneous_operations': If multiple operations of any type exist in the one mutation request, a failure in one will result in all changes being reverted -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum AtomicitySupportLevel { - #[serde(rename = "row")] - Row, - #[serde(rename = "single_operation")] - SingleOperation, - #[serde(rename = "homogeneous_operations")] - HomogeneousOperations, - #[serde(rename = "heterogeneous_operations")] - HeterogeneousOperations, -} - -impl ToString for AtomicitySupportLevel { - fn to_string(&self) -> String { - match self { - Self::Row => String::from("row"), - Self::SingleOperation => 
String::from("single_operation"), - Self::HomogeneousOperations => String::from("homogeneous_operations"), - Self::HeterogeneousOperations => String::from("heterogeneous_operations"), - } - } -} - -impl Default for AtomicitySupportLevel { - fn default() -> AtomicitySupportLevel { - Self::Row - } -} diff --git a/crates/dc-api-types/src/auto_increment_generation_strategy.rs b/crates/dc-api-types/src/auto_increment_generation_strategy.rs deleted file mode 100644 index 3caa81cc..00000000 --- a/crates/dc-api-types/src/auto_increment_generation_strategy.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct AutoIncrementGenerationStrategy { - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl AutoIncrementGenerationStrategy { - pub fn new(r#type: RHashType) -> AutoIncrementGenerationStrategy { - AutoIncrementGenerationStrategy { r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "auto_increment")] - AutoIncrement, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::AutoIncrement - } -} diff --git a/crates/dc-api-types/src/binary_array_comparison_operator.rs b/crates/dc-api-types/src/binary_array_comparison_operator.rs deleted file mode 100644 index e1250eb9..00000000 --- a/crates/dc-api-types/src/binary_array_comparison_operator.rs +++ /dev/null @@ -1,87 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{de, 
Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Deserialize)] -#[serde(untagged)] -pub enum BinaryArrayComparisonOperator { - #[serde(deserialize_with = "parse_in")] - In, - CustomBinaryComparisonOperator(String), -} - -impl Serialize for BinaryArrayComparisonOperator { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - BinaryArrayComparisonOperator::In => serializer.serialize_str("in"), - BinaryArrayComparisonOperator::CustomBinaryComparisonOperator(s) => { - serializer.serialize_str(s) - } - } - } -} - -fn parse_in<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - if s == "in" { - Ok(()) - } else { - Err(de::Error::custom("invalid value")) - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use super::BinaryArrayComparisonOperator; - - #[test] - fn serialize_is_null() -> Result<(), anyhow::Error> { - let input = BinaryArrayComparisonOperator::In; - assert_eq!(to_bson(&input)?, bson!("in")); - Ok(()) - } - - #[test] - fn serialize_custom_unary_comparison_operator() -> Result<(), anyhow::Error> { - let input = - BinaryArrayComparisonOperator::CustomBinaryComparisonOperator("tensor".to_owned()); - assert_eq!(to_bson(&input)?, bson!("tensor")); - Ok(()) - } - - #[test] - fn parses_in() -> Result<(), anyhow::Error> { - let input = bson!("in"); - assert_eq!( - from_bson::(input)?, - BinaryArrayComparisonOperator::In - ); - Ok(()) - } - - #[test] - fn parses_custom_operator() -> Result<(), anyhow::Error> { - let input = bson!("sum"); - assert_eq!( - from_bson::(input)?, - BinaryArrayComparisonOperator::CustomBinaryComparisonOperator("sum".to_owned()) - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/binary_comparison_operator.rs b/crates/dc-api-types/src/binary_comparison_operator.rs deleted file mode 100644 index ab27609e..00000000 --- 
a/crates/dc-api-types/src/binary_comparison_operator.rs +++ /dev/null @@ -1,209 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{de, Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Deserialize)] -#[serde(untagged)] -pub enum BinaryComparisonOperator { - #[serde(deserialize_with = "parse_less_than")] - LessThan, - #[serde(deserialize_with = "parse_less_than_or_equal")] - LessThanOrEqual, - #[serde(deserialize_with = "parse_greater_than")] - GreaterThan, - #[serde(deserialize_with = "parse_greater_than_or_equal")] - GreaterThanOrEqual, - #[serde(deserialize_with = "parse_equal")] - Equal, - CustomBinaryComparisonOperator(String), -} - -impl Serialize for BinaryComparisonOperator { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - BinaryComparisonOperator::LessThan => serializer.serialize_str("less_than"), - BinaryComparisonOperator::LessThanOrEqual => { - serializer.serialize_str("less_than_or_equal") - } - BinaryComparisonOperator::GreaterThan => serializer.serialize_str("greater_than"), - BinaryComparisonOperator::GreaterThanOrEqual => { - serializer.serialize_str("greater_than_or_equal") - } - BinaryComparisonOperator::Equal => serializer.serialize_str("equal"), - BinaryComparisonOperator::CustomBinaryComparisonOperator(s) => { - serializer.serialize_str(s) - } - } - } -} - -fn parse_less_than<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - string_p::<'de, D>(s, "less_than".to_owned()) -} - -fn parse_less_than_or_equal<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - string_p::<'de, D>(s, "less_than_or_equal".to_owned()) -} - -fn 
parse_greater_than<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - string_p::<'de, D>(s, "greater_than".to_owned()) -} - -fn parse_greater_than_or_equal<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - string_p::<'de, D>(s, "greater_than_or_equal".to_owned()) -} - -fn parse_equal<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - string_p::<'de, D>(s, "equal".to_owned()) -} - -fn string_p<'de, D>(expected: String, input: String) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - if input == expected { - Ok(()) - } else { - Err(de::Error::custom("invalid value")) - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use super::BinaryComparisonOperator; - - #[test] - fn serialize_less_than() -> Result<(), anyhow::Error> { - let input = BinaryComparisonOperator::LessThan; - assert_eq!(to_bson(&input)?, bson!("less_than")); - Ok(()) - } - - #[test] - fn serialize_less_than_or_equal() -> Result<(), anyhow::Error> { - let input = BinaryComparisonOperator::LessThanOrEqual; - assert_eq!(to_bson(&input)?, bson!("less_than_or_equal")); - Ok(()) - } - - #[test] - fn serialize_greater_than() -> Result<(), anyhow::Error> { - let input = BinaryComparisonOperator::GreaterThan; - assert_eq!(to_bson(&input)?, bson!("greater_than")); - Ok(()) - } - - #[test] - fn serialize_greater_than_or_equal() -> Result<(), anyhow::Error> { - let input = BinaryComparisonOperator::GreaterThanOrEqual; - assert_eq!(to_bson(&input)?, bson!("greater_than_or_equal")); - Ok(()) - } - - #[test] - fn serialize_equal() -> Result<(), anyhow::Error> { - let input = BinaryComparisonOperator::Equal; - assert_eq!(to_bson(&input)?, bson!("equal")); - Ok(()) - } - - #[test] - fn 
serialize_custom_binary_comparison_operator() -> Result<(), anyhow::Error> { - let input = BinaryComparisonOperator::CustomBinaryComparisonOperator("tensor".to_owned()); - assert_eq!(to_bson(&input)?, bson!("tensor")); - Ok(()) - } - - #[test] - fn parses_less_than() -> Result<(), anyhow::Error> { - let input = bson!("less_than"); - assert_eq!( - from_bson::(input)?, - BinaryComparisonOperator::LessThan - ); - Ok(()) - } - - #[test] - fn parses_less_than_or_equal() -> Result<(), anyhow::Error> { - let input = bson!("less_than_or_equal"); - assert_eq!( - from_bson::(input)?, - BinaryComparisonOperator::LessThanOrEqual - ); - Ok(()) - } - - #[test] - fn parses_greater_than() -> Result<(), anyhow::Error> { - let input = bson!("greater_than"); - assert_eq!( - from_bson::(input)?, - BinaryComparisonOperator::GreaterThan - ); - Ok(()) - } - - #[test] - fn parses_greater_than_or_equal() -> Result<(), anyhow::Error> { - let input = bson!("greater_than_or_equal"); - assert_eq!( - from_bson::(input)?, - BinaryComparisonOperator::GreaterThanOrEqual - ); - Ok(()) - } - - #[test] - fn parses_equal() -> Result<(), anyhow::Error> { - let input = bson!("equal"); - assert_eq!( - from_bson::(input)?, - BinaryComparisonOperator::Equal - ); - Ok(()) - } - - #[test] - fn parses_custom_operator() -> Result<(), anyhow::Error> { - let input = bson!("tensor"); - assert_eq!( - from_bson::(input)?, - BinaryComparisonOperator::CustomBinaryComparisonOperator("tensor".to_owned()) - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/capabilities.rs b/crates/dc-api-types/src/capabilities.rs deleted file mode 100644 index 90d22870..00000000 --- a/crates/dc-api-types/src/capabilities.rs +++ /dev/null @@ -1,97 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - 
-#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct Capabilities { - #[serde(rename = "comparisons", skip_serializing_if = "Option::is_none")] - pub comparisons: Option>, - #[serde(rename = "data_schema", skip_serializing_if = "Option::is_none")] - pub data_schema: Option>, - #[serde( - rename = "datasets", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub datasets: Option>, - #[serde( - rename = "explain", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub explain: Option>, - #[serde( - rename = "licensing", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub licensing: Option>, - #[serde( - rename = "metrics", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub metrics: Option>, - #[serde(rename = "mutations", skip_serializing_if = "Option::is_none")] - pub mutations: Option>, - #[serde(rename = "post_schema", skip_serializing_if = "Option::is_none")] - pub post_schema: Option>, - #[serde(rename = "queries", skip_serializing_if = "Option::is_none")] - pub queries: Option>, - #[serde( - rename = "raw", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub raw: Option>, - #[serde( - rename = "relationships", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub relationships: Option>, - /// A map from scalar type names to their capabilities. 
Keys must be valid GraphQL names and must be defined as scalar types in the `graphql_schema` - #[serde(rename = "scalar_types", skip_serializing_if = "Option::is_none")] - pub scalar_types: Option<::std::collections::HashMap>, - #[serde( - rename = "subscriptions", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub subscriptions: Option>, -} - -impl Capabilities { - pub fn new() -> Capabilities { - Capabilities { - comparisons: None, - data_schema: None, - datasets: None, - explain: None, - licensing: None, - metrics: None, - mutations: None, - post_schema: None, - queries: None, - raw: None, - relationships: None, - scalar_types: None, - subscriptions: None, - } - } -} diff --git a/crates/dc-api-types/src/capabilities_response.rs b/crates/dc-api-types/src/capabilities_response.rs deleted file mode 100644 index abd4bebc..00000000 --- a/crates/dc-api-types/src/capabilities_response.rs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct CapabilitiesResponse { - #[serde(rename = "capabilities")] - pub capabilities: Box, - #[serde(rename = "config_schemas")] - pub config_schemas: Box, - #[serde(rename = "display_name", skip_serializing_if = "Option::is_none")] - pub display_name: Option, - #[serde(rename = "release_name", skip_serializing_if = "Option::is_none")] - pub release_name: Option, -} - -impl CapabilitiesResponse { - pub fn new( - capabilities: crate::Capabilities, - config_schemas: crate::ConfigSchemaResponse, - ) -> CapabilitiesResponse { - CapabilitiesResponse { - capabilities: Box::new(capabilities), - config_schemas: Box::new(config_schemas), - display_name: None, - 
release_name: None, - } - } -} diff --git a/crates/dc-api-types/src/column_count_aggregate.rs b/crates/dc-api-types/src/column_count_aggregate.rs deleted file mode 100644 index 3eae4fd7..00000000 --- a/crates/dc-api-types/src/column_count_aggregate.rs +++ /dev/null @@ -1,46 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ColumnCountAggregate { - /// The column to apply the count aggregate function to - #[serde(rename = "column")] - pub column: String, - /// Whether or not only distinct items should be counted - #[serde(rename = "distinct")] - pub distinct: bool, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl ColumnCountAggregate { - pub fn new(column: String, distinct: bool, r#type: RHashType) -> ColumnCountAggregate { - ColumnCountAggregate { - column, - distinct, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column_count")] - ColumnCount, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::ColumnCount - } -} diff --git a/crates/dc-api-types/src/column_field.rs b/crates/dc-api-types/src/column_field.rs deleted file mode 100644 index 00e92815..00000000 --- a/crates/dc-api-types/src/column_field.rs +++ /dev/null @@ -1,44 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ColumnField { - #[serde(rename = 
"column")] - pub column: String, - #[serde(rename = "column_type")] - pub column_type: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl ColumnField { - pub fn new(column: String, column_type: String, r#type: RHashType) -> ColumnField { - ColumnField { - column, - column_type, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/column_info.rs b/crates/dc-api-types/src/column_info.rs deleted file mode 100644 index 443415e4..00000000 --- a/crates/dc-api-types/src/column_info.rs +++ /dev/null @@ -1,55 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -use super::ColumnType; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ColumnInfo { - /// Column description - #[serde( - rename = "description", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub description: Option>, - /// Whether or not the column can be inserted into - #[serde(rename = "insertable", skip_serializing_if = "Option::is_none")] - pub insertable: Option, - /// Column name - #[serde(rename = "name")] - pub name: String, - /// Is column nullable - #[serde(rename = "nullable")] - pub nullable: bool, - #[serde(rename = "type")] - pub r#type: crate::ColumnType, - /// Whether or not the column can be updated - #[serde(rename = "updatable", skip_serializing_if = "Option::is_none")] - pub updatable: Option, - #[serde(rename = "value_generated", skip_serializing_if = "Option::is_none")] - pub value_generated: Option>, -} - -impl 
ColumnInfo { - pub fn new(name: String, nullable: bool, r#type: ColumnType) -> ColumnInfo { - ColumnInfo { - description: None, - insertable: None, - name, - nullable, - r#type, - updatable: None, - value_generated: None, - } - } -} diff --git a/crates/dc-api-types/src/column_insert_schema.rs b/crates/dc-api-types/src/column_insert_schema.rs deleted file mode 100644 index 735b6742..00000000 --- a/crates/dc-api-types/src/column_insert_schema.rs +++ /dev/null @@ -1,57 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ColumnInsertSchema { - /// The name of the column that this field should be inserted into - #[serde(rename = "column")] - pub column: String, - #[serde(rename = "column_type")] - pub column_type: String, - /// Is the column nullable - #[serde(rename = "nullable")] - pub nullable: bool, - #[serde(rename = "type")] - pub r#type: RHashType, - #[serde(rename = "value_generated", skip_serializing_if = "Option::is_none")] - pub value_generated: Option>, -} - -impl ColumnInsertSchema { - pub fn new( - column: String, - column_type: String, - nullable: bool, - r#type: RHashType, - ) -> ColumnInsertSchema { - ColumnInsertSchema { - column, - column_type, - nullable, - r#type, - value_generated: None, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/column_nullability.rs b/crates/dc-api-types/src/column_nullability.rs deleted file mode 100644 index 80bcbe14..00000000 --- a/crates/dc-api-types/src/column_nullability.rs 
+++ /dev/null @@ -1,35 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum ColumnNullability { - #[serde(rename = "only_nullable")] - OnlyNullable, - #[serde(rename = "nullable_and_non_nullable")] - NullableAndNonNullable, -} - -impl ToString for ColumnNullability { - fn to_string(&self) -> String { - match self { - Self::OnlyNullable => String::from("only_nullable"), - Self::NullableAndNonNullable => String::from("nullable_and_non_nullable"), - } - } -} - -impl Default for ColumnNullability { - fn default() -> ColumnNullability { - Self::OnlyNullable - } -} diff --git a/crates/dc-api-types/src/column_type.rs b/crates/dc-api-types/src/column_type.rs deleted file mode 100644 index cc7b011a..00000000 --- a/crates/dc-api-types/src/column_type.rs +++ /dev/null @@ -1,140 +0,0 @@ -use serde::{de, ser::SerializeMap, Deserialize, Serialize}; - -use crate::{GraphQLName, GqlName}; - -#[derive(Clone, Debug, PartialEq, Deserialize)] -#[serde(untagged)] -pub enum ColumnType { - Scalar(String), - #[serde(deserialize_with = "parse_object")] - Object(GraphQLName), - Array { - element_type: Box, - nullable: bool, - }, -} - -impl Serialize for ColumnType { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - ColumnType::Scalar(s) => serializer.serialize_str(s), - ColumnType::Object(s) => { - let mut map = serializer.serialize_map(Some(2))?; - map.serialize_entry("type", "object")?; - map.serialize_entry("name", s)?; - map.end() - } - ColumnType::Array { - element_type, - nullable, - } => { - let mut map = serializer.serialize_map(Some(3))?; - map.serialize_entry("type", "array")?; - 
map.serialize_entry("element_type", element_type)?; - map.serialize_entry("nullable", nullable)?; - map.end() - } - } - } -} - -fn parse_object<'de, D>(deserializer: D) -> Result -where - D: de::Deserializer<'de>, -{ - let v = serde_json::Value::deserialize(deserializer)?; - let obj = v.as_object().and_then(|o| o.get("name")); - - match obj { - Some(name) => match name.as_str() { - Some(s) => Ok(GqlName::from_trusted_safe_str(s).into_owned()), - None => Err(de::Error::custom("invalid value")), - }, - _ => Err(de::Error::custom("invalid value")), - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use super::ColumnType; - - #[test] - fn serialize_scalar() -> Result<(), anyhow::Error> { - let input = ColumnType::Scalar("string".to_owned()); - assert_eq!(to_bson(&input)?, bson!("string".to_owned())); - Ok(()) - } - - #[test] - fn serialize_object() -> Result<(), anyhow::Error> { - let input = ColumnType::Object("documents_place".into()); - assert_eq!( - to_bson(&input)?, - bson!({"type": "object".to_owned(), "name": "documents_place".to_owned()}) - ); - Ok(()) - } - - #[test] - fn serialize_array() -> Result<(), anyhow::Error> { - let input = ColumnType::Array { - element_type: Box::new(ColumnType::Scalar("string".to_owned())), - nullable: false, - }; - assert_eq!( - to_bson(&input)?, - bson!( - { - "type": "array".to_owned(), - "element_type": "string".to_owned(), - "nullable": false - } - ) - ); - Ok(()) - } - - #[test] - fn parses_scalar() -> Result<(), anyhow::Error> { - let input = bson!("string".to_owned()); - assert_eq!( - from_bson::(input)?, - ColumnType::Scalar("string".to_owned()) - ); - Ok(()) - } - - #[test] - fn parses_object() -> Result<(), anyhow::Error> { - let input = bson!({"type": "object".to_owned(), "name": "documents_place".to_owned()}); - assert_eq!( - from_bson::(input)?, - ColumnType::Object("documents_place".into()) - ); - Ok(()) - } - - #[test] - fn parses_array() -> Result<(), anyhow::Error> { - let 
input = bson!( - { - "type": "array".to_owned(), - "element_type": "string".to_owned(), - "nullable": false - } - ); - assert_eq!( - from_bson::(input)?, - ColumnType::Array { - element_type: Box::new(ColumnType::Scalar("string".to_owned())), - nullable: false, - } - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/column_value_generation_strategy.rs b/crates/dc-api-types/src/column_value_generation_strategy.rs deleted file mode 100644 index e7dc79db..00000000 --- a/crates/dc-api-types/src/column_value_generation_strategy.rs +++ /dev/null @@ -1,35 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum ColumnValueGenerationStrategy { - #[serde(rename = "auto_increment")] - AutoIncrement {}, - #[serde(rename = "default_value")] - DefaultValue {}, - #[serde(rename = "unique_identifier")] - UniqueIdentifier {}, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "unique_identifier")] - UniqueIdentifier, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::UniqueIdentifier - } -} diff --git a/crates/dc-api-types/src/comparison_capabilities.rs b/crates/dc-api-types/src/comparison_capabilities.rs deleted file mode 100644 index d42c1d74..00000000 --- a/crates/dc-api-types/src/comparison_capabilities.rs +++ /dev/null @@ -1,28 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, 
Serialize, Deserialize)] -pub struct ComparisonCapabilities { - #[serde( - rename = "subquery", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub subquery: Option>>, -} - -impl ComparisonCapabilities { - pub fn new() -> ComparisonCapabilities { - ComparisonCapabilities { subquery: None } - } -} diff --git a/crates/dc-api-types/src/comparison_column.rs b/crates/dc-api-types/src/comparison_column.rs deleted file mode 100644 index 748851b9..00000000 --- a/crates/dc-api-types/src/comparison_column.rs +++ /dev/null @@ -1,146 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use nonempty::NonEmpty; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ComparisonColumn { - #[serde(rename = "column_type")] - pub column_type: String, - /// The name of the column - #[serde(rename = "name")] - pub name: ColumnSelector, - /// The path to the table that contains the specified column. Missing or empty array means the current table. [\"$\"] means the query table. No other values are supported at this time. - #[serde(rename = "path", skip_serializing_if = "Option::is_none")] - // TODO: OpenAPI has a default value here. Should we remove the optional? 
- pub path: Option>, -} - -impl ComparisonColumn { - pub fn new(column_type: String, name: ColumnSelector) -> ComparisonColumn { - ComparisonColumn { - column_type, - name, - path: None, - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(untagged)] -pub enum ColumnSelector { - Path(NonEmpty), - Column(String), -} - -impl ColumnSelector { - pub fn new(column: String) -> ColumnSelector { - ColumnSelector::Column(column) - } - - pub fn join(&self, separator: &str) -> String { - match self { - ColumnSelector::Path(p) => p - .iter() - .map(|s| s.as_str()) - .collect::>() - .join(separator), - ColumnSelector::Column(c) => c.clone(), - } - } - - pub fn as_var(&self) -> String { - self.join("_") - } - - pub fn as_path(&self) -> String { - self.join(".") - } - - pub fn is_column(&self) -> bool { - match self { - ColumnSelector::Path(_) => false, - ColumnSelector::Column(_) => true, - } - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - use nonempty::nonempty; - - use super::{ColumnSelector, ComparisonColumn}; - - #[test] - fn serialize_comparison_column() -> Result<(), anyhow::Error> { - let input = ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None, - }; - assert_eq!( - to_bson(&input)?, - bson!({"column_type": "string", "name": "title"}) - ); - Ok(()) - } - - #[test] - fn parses_comparison_column() -> Result<(), anyhow::Error> { - let input = bson!({"column_type": "string", "name": "title"}); - assert_eq!( - from_bson::(input)?, - ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None, - } - ); - Ok(()) - } - - #[test] - fn serialize_column_selector() -> Result<(), anyhow::Error> { - let input = ColumnSelector::Path(nonempty![ - "path".to_owned(), - "to".to_owned(), - "nested".to_owned(), - "field".to_owned() - ]); - assert_eq!(to_bson(&input)?, 
bson!(["path", "to", "nested", "field"])); - - let input = ColumnSelector::new("singleton".to_owned()); - assert_eq!(to_bson(&input)?, bson!("singleton")); - Ok(()) - } - - #[test] - fn parse_column_selector() -> Result<(), anyhow::Error> { - let input = bson!(["path", "to", "nested", "field"]); - assert_eq!( - from_bson::(input)?, - ColumnSelector::Path(nonempty![ - "path".to_owned(), - "to".to_owned(), - "nested".to_owned(), - "field".to_owned() - ]) - ); - - let input = bson!("singleton"); - assert_eq!( - from_bson::(input)?, - ColumnSelector::new("singleton".to_owned()) - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/comparison_value.rs b/crates/dc-api-types/src/comparison_value.rs deleted file mode 100644 index 89308b21..00000000 --- a/crates/dc-api-types/src/comparison_value.rs +++ /dev/null @@ -1,114 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum ComparisonValue { - #[serde(rename = "column")] - AnotherColumnComparison { - #[serde(rename = "column")] - column: crate::ComparisonColumn, - }, - #[serde(rename = "scalar")] - ScalarValueComparison { - #[serde(rename = "value")] - value: serde_json::Value, - #[serde(rename = "value_type")] - value_type: String, - }, - /// The `Variable` variant is not part of the v2 DC API - it is included to support queries - /// translated from the v3 NDC API. 
- #[serde(skip)] - Variable { name: String }, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use crate::{comparison_column::ColumnSelector, ComparisonColumn}; - - use super::ComparisonValue; - - #[test] - fn serialize_scalar_value_comparison() -> Result<(), anyhow::Error> { - let input = ComparisonValue::ScalarValueComparison { - value: serde_json::json!("One"), - value_type: "string".to_owned(), - }; - assert_eq!( - to_bson(&input)?, - bson!({"value": "One", "value_type": "string", "type": "scalar"}) - ); - Ok(()) - } - - #[test] - fn serialize_another_column_comparison() -> Result<(), anyhow::Error> { - let input = ComparisonValue::AnotherColumnComparison { - column: ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None, - }, - }; - assert_eq!( - to_bson(&input)?, - bson!({"column": {"column_type": "string", "name": "title"}, "type": "column"}) - ); - Ok(()) - } - - #[test] - fn parses_scalar_value_comparison() -> Result<(), anyhow::Error> { - let input = bson!({"value": "One", "value_type": "string", "type": "scalar"}); - assert_eq!( - from_bson::(input)?, - ComparisonValue::ScalarValueComparison { - value: serde_json::json!("One"), - value_type: "string".to_owned(), - } - ); - Ok(()) - } - - #[test] - fn parses_another_column_comparison() -> Result<(), anyhow::Error> { - let input = bson!({ - "column": {"column_type": "string", "name": "title"}, - "type": "column"}); - assert_eq!( - from_bson::(input)?, - ComparisonValue::AnotherColumnComparison { - column: ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None, - }, - } - ); - Ok(()) - } -} diff --git 
a/crates/dc-api-types/src/config_schema_response.rs b/crates/dc-api-types/src/config_schema_response.rs deleted file mode 100644 index 96ea0909..00000000 --- a/crates/dc-api-types/src/config_schema_response.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ConfigSchemaResponse { - #[serde(rename = "config_schema")] - pub config_schema: Box, - #[serde(rename = "other_schemas")] - pub other_schemas: ::std::collections::HashMap, -} - -impl ConfigSchemaResponse { - pub fn new( - config_schema: crate::OpenApiSchema, - other_schemas: ::std::collections::HashMap, - ) -> ConfigSchemaResponse { - ConfigSchemaResponse { - config_schema: Box::new(config_schema), - other_schemas, - } - } -} diff --git a/crates/dc-api-types/src/constraint.rs b/crates/dc-api-types/src/constraint.rs deleted file mode 100644 index 909fe14a..00000000 --- a/crates/dc-api-types/src/constraint.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct Constraint { - /// The columns on which you want want to define the foreign key. 
- #[serde(rename = "column_mapping")] - pub column_mapping: ::std::collections::HashMap, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "foreign_table")] - pub foreign_table: Vec, -} - -impl Constraint { - pub fn new( - column_mapping: ::std::collections::HashMap, - foreign_table: Vec, - ) -> Constraint { - Constraint { - column_mapping, - foreign_table, - } - } -} diff --git a/crates/dc-api-types/src/custom_update_column_operator_row_update.rs b/crates/dc-api-types/src/custom_update_column_operator_row_update.rs deleted file mode 100644 index 3f58854b..00000000 --- a/crates/dc-api-types/src/custom_update_column_operator_row_update.rs +++ /dev/null @@ -1,58 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct CustomUpdateColumnOperatorRowUpdate { - /// The name of the column in the row - #[serde(rename = "column")] - pub column: String, - #[serde(rename = "operator_name")] - pub operator_name: String, - #[serde(rename = "type")] - pub r#type: RHashType, - /// The value to use with the column operator - #[serde(rename = "value")] - pub value: ::std::collections::HashMap, - #[serde(rename = "value_type")] - pub value_type: String, -} - -impl CustomUpdateColumnOperatorRowUpdate { - pub fn new( - column: String, - operator_name: String, - r#type: RHashType, - value: ::std::collections::HashMap, - value_type: String, - ) -> CustomUpdateColumnOperatorRowUpdate { - CustomUpdateColumnOperatorRowUpdate { - column, - operator_name, - r#type, - value, - value_type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, 
Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "custom_operator")] - CustomOperator, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::CustomOperator - } -} diff --git a/crates/dc-api-types/src/data_schema_capabilities.rs b/crates/dc-api-types/src/data_schema_capabilities.rs deleted file mode 100644 index f16a499c..00000000 --- a/crates/dc-api-types/src/data_schema_capabilities.rs +++ /dev/null @@ -1,45 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DataSchemaCapabilities { - #[serde(rename = "column_nullability", skip_serializing_if = "Option::is_none")] - pub column_nullability: Option, - /// Whether tables can have foreign keys - #[serde( - rename = "supports_foreign_keys", - skip_serializing_if = "Option::is_none" - )] - pub supports_foreign_keys: Option, - /// Whether tables can have primary keys - #[serde( - rename = "supports_primary_keys", - skip_serializing_if = "Option::is_none" - )] - pub supports_primary_keys: Option, - #[serde( - rename = "supports_schemaless_tables", - skip_serializing_if = "Option::is_none" - )] - pub supports_schemaless_tables: Option, -} - -impl DataSchemaCapabilities { - pub fn new() -> DataSchemaCapabilities { - DataSchemaCapabilities { - column_nullability: None, - supports_foreign_keys: None, - supports_primary_keys: None, - supports_schemaless_tables: None, - } - } -} diff --git a/crates/dc-api-types/src/dataset_create_clone_request.rs b/crates/dc-api-types/src/dataset_create_clone_request.rs deleted file mode 100644 index cff08ac9..00000000 --- a/crates/dc-api-types/src/dataset_create_clone_request.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* - * - * - * No description provided 
(generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DatasetCreateCloneRequest { - #[serde(rename = "from")] - pub from: String, -} - -impl DatasetCreateCloneRequest { - pub fn new(from: String) -> DatasetCreateCloneRequest { - DatasetCreateCloneRequest { from } - } -} diff --git a/crates/dc-api-types/src/dataset_create_clone_response.rs b/crates/dc-api-types/src/dataset_create_clone_response.rs deleted file mode 100644 index 75b86ad6..00000000 --- a/crates/dc-api-types/src/dataset_create_clone_response.rs +++ /dev/null @@ -1,29 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DatasetCreateCloneResponse { - #[serde(rename = "config")] - pub config: - ::std::collections::HashMap>, -} - -impl DatasetCreateCloneResponse { - pub fn new( - config: ::std::collections::HashMap< - String, - ::std::collections::HashMap, - >, - ) -> DatasetCreateCloneResponse { - DatasetCreateCloneResponse { config } - } -} diff --git a/crates/dc-api-types/src/dataset_delete_clone_response.rs b/crates/dc-api-types/src/dataset_delete_clone_response.rs deleted file mode 100644 index 01aa64df..00000000 --- a/crates/dc-api-types/src/dataset_delete_clone_response.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use 
serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DatasetDeleteCloneResponse { - /// The named dataset to clone from - #[serde(rename = "message")] - pub message: String, -} - -impl DatasetDeleteCloneResponse { - pub fn new(message: String) -> DatasetDeleteCloneResponse { - DatasetDeleteCloneResponse { message } - } -} diff --git a/crates/dc-api-types/src/dataset_get_template_response.rs b/crates/dc-api-types/src/dataset_get_template_response.rs deleted file mode 100644 index a633eac9..00000000 --- a/crates/dc-api-types/src/dataset_get_template_response.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DatasetGetTemplateResponse { - /// Message detailing if the dataset exists - #[serde(rename = "exists")] - pub exists: bool, -} - -impl DatasetGetTemplateResponse { - pub fn new(exists: bool) -> DatasetGetTemplateResponse { - DatasetGetTemplateResponse { exists } - } -} diff --git a/crates/dc-api-types/src/default_value_generation_strategy.rs b/crates/dc-api-types/src/default_value_generation_strategy.rs deleted file mode 100644 index c7179a85..00000000 --- a/crates/dc-api-types/src/default_value_generation_strategy.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DefaultValueGenerationStrategy { - #[serde(rename = "type")] - pub r#type: RHashType, 
-} - -impl DefaultValueGenerationStrategy { - pub fn new(r#type: RHashType) -> DefaultValueGenerationStrategy { - DefaultValueGenerationStrategy { r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "default_value")] - DefaultValue, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::DefaultValue - } -} diff --git a/crates/dc-api-types/src/delete_mutation_operation.rs b/crates/dc-api-types/src/delete_mutation_operation.rs deleted file mode 100644 index 8b1615c5..00000000 --- a/crates/dc-api-types/src/delete_mutation_operation.rs +++ /dev/null @@ -1,54 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct DeleteMutationOperation { - /// The fields to return for the rows affected by this delete operation - #[serde( - rename = "returning_fields", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub returning_fields: Option>>, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - pub table: Vec, - #[serde(rename = "type")] - pub r#type: RHashType, - #[serde(rename = "where", skip_serializing_if = "Option::is_none")] - pub r#where: Option>, -} - -impl DeleteMutationOperation { - pub fn new(table: Vec, r#type: RHashType) -> DeleteMutationOperation { - DeleteMutationOperation { - returning_fields: None, - table, - r#type, - r#where: None, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum 
RHashType { - #[serde(rename = "delete")] - Delete, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Delete - } -} diff --git a/crates/dc-api-types/src/error_response.rs b/crates/dc-api-types/src/error_response.rs deleted file mode 100644 index 1f793150..00000000 --- a/crates/dc-api-types/src/error_response.rs +++ /dev/null @@ -1,38 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use std::fmt::Display; - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ErrorResponse { - /// Error details - #[serde(rename = "details", skip_serializing_if = "Option::is_none")] - pub details: Option<::std::collections::HashMap>, - /// Error message - #[serde(rename = "message")] - pub message: String, - #[serde(rename = "type", skip_serializing_if = "Option::is_none")] - pub r#type: Option, -} - -impl ErrorResponse { - pub fn new(message: &T) -> ErrorResponse - where - T: Display + ?Sized, - { - ErrorResponse { - details: None, - message: format!("{message}"), - r#type: None, - } - } -} diff --git a/crates/dc-api-types/src/error_response_type.rs b/crates/dc-api-types/src/error_response_type.rs deleted file mode 100644 index 2aff729e..00000000 --- a/crates/dc-api-types/src/error_response_type.rs +++ /dev/null @@ -1,40 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -/// -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum ErrorResponseType { - #[serde(rename = "uncaught-error")] - UncaughtError, - #[serde(rename = 
"mutation-constraint-violation")] - MutationConstraintViolation, - #[serde(rename = "mutation-permission-check-failure")] - MutationPermissionCheckFailure, -} - -impl ToString for ErrorResponseType { - fn to_string(&self) -> String { - match self { - Self::UncaughtError => String::from("uncaught-error"), - Self::MutationConstraintViolation => String::from("mutation-constraint-violation"), - Self::MutationPermissionCheckFailure => { - String::from("mutation-permission-check-failure") - } - } - } -} - -impl Default for ErrorResponseType { - fn default() -> ErrorResponseType { - Self::UncaughtError - } -} diff --git a/crates/dc-api-types/src/exists_expression.rs b/crates/dc-api-types/src/exists_expression.rs deleted file mode 100644 index a4f51615..00000000 --- a/crates/dc-api-types/src/exists_expression.rs +++ /dev/null @@ -1,48 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ExistsExpression { - #[serde(rename = "in_table")] - pub in_table: Box, - #[serde(rename = "type")] - pub r#type: RHashType, - #[serde(rename = "where")] - pub r#where: Box, -} - -impl ExistsExpression { - pub fn new( - in_table: crate::ExistsInTable, - r#type: RHashType, - r#where: crate::Expression, - ) -> ExistsExpression { - ExistsExpression { - in_table: Box::new(in_table), - r#type, - r#where: Box::new(r#where), - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "exists")] - Exists, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Exists - } -} diff --git a/crates/dc-api-types/src/exists_in_table.rs b/crates/dc-api-types/src/exists_in_table.rs deleted file mode 100644 
index b865f8de..00000000 --- a/crates/dc-api-types/src/exists_in_table.rs +++ /dev/null @@ -1,88 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum ExistsInTable { - #[serde(rename = "related")] - RelatedTable { - #[serde(rename = "relationship")] - relationship: String, - }, - #[serde(rename = "unrelated")] - UnrelatedTable { - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - table: Vec, - }, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "related")] - Related, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Related - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use super::ExistsInTable; - - #[test] - fn serialize_related_table() -> Result<(), anyhow::Error> { - let input = ExistsInTable::RelatedTable { - relationship: "foo".to_owned(), - }; - assert_eq!( - to_bson(&input)?, - bson!({"type": "related", "relationship": "foo".to_owned()}) - ); - Ok(()) - } - - #[test] - fn serialize_unrelated_table() -> Result<(), anyhow::Error> { - let input = ExistsInTable::UnrelatedTable { table: vec![] }; - assert_eq!(to_bson(&input)?, bson!({"type": "unrelated", "table": []})); - Ok(()) - } - - #[test] - fn parses_related_table() -> Result<(), anyhow::Error> { - let input = bson!({"type": "related", "relationship": "foo".to_owned()}); - assert_eq!( - from_bson::(input)?, - ExistsInTable::RelatedTable { - relationship: "foo".to_owned(), - } - ); - Ok(()) - } - - 
#[test] - fn parses_unrelated_table() -> Result<(), anyhow::Error> { - let input = bson!({"type": "unrelated", "table": []}); - assert_eq!( - from_bson::(input)?, - ExistsInTable::UnrelatedTable { table: vec![] } - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/explain_response.rs b/crates/dc-api-types/src/explain_response.rs deleted file mode 100644 index 5dc54bb4..00000000 --- a/crates/dc-api-types/src/explain_response.rs +++ /dev/null @@ -1,27 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ExplainResponse { - /// Lines of the formatted explain plan response - #[serde(rename = "lines")] - pub lines: Vec, - /// The generated query - i.e. SQL for a relational DB - #[serde(rename = "query")] - pub query: String, -} - -impl ExplainResponse { - pub fn new(lines: Vec, query: String) -> ExplainResponse { - ExplainResponse { lines, query } - } -} diff --git a/crates/dc-api-types/src/expression.rs b/crates/dc-api-types/src/expression.rs deleted file mode 100644 index c77c41bc..00000000 --- a/crates/dc-api-types/src/expression.rs +++ /dev/null @@ -1,231 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -use crate::ArrayComparisonValue; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum Expression { - #[serde(rename = "and")] - And { - #[serde(rename = "expressions")] - expressions: Vec, - }, - #[serde(rename = "binary_arr_op")] - ApplyBinaryArrayComparison { - #[serde(rename = "column")] - 
column: crate::ComparisonColumn, - #[serde(rename = "operator")] - operator: crate::BinaryArrayComparisonOperator, - #[serde(rename = "value_type")] - value_type: String, - #[serde(rename = "values")] - values: Vec, - }, - #[serde(rename = "binary_op")] - ApplyBinaryComparison { - #[serde(rename = "column")] - column: crate::ComparisonColumn, - #[serde(rename = "operator")] - operator: crate::BinaryComparisonOperator, - #[serde(rename = "value")] - value: crate::ComparisonValue, - }, - #[serde(rename = "exists")] - Exists { - #[serde(rename = "in_table")] - in_table: crate::ExistsInTable, - #[serde(rename = "where")] - r#where: Box, - }, - #[serde(rename = "not")] - Not { - #[serde(rename = "expression")] - expression: Box, - }, - #[serde(rename = "or")] - Or { - #[serde(rename = "expressions")] - expressions: Vec, - }, - #[serde(rename = "unary_op")] - ApplyUnaryComparison { - #[serde(rename = "column")] - column: crate::ComparisonColumn, - #[serde(rename = "operator")] - operator: crate::UnaryComparisonOperator, - }, -} - -impl Expression { - pub fn and(self, other: Expression) -> Expression { - match other { - Expression::And { mut expressions } => { - expressions.push(self); - Expression::And { expressions } - } - _ => Expression::And { - expressions: vec![self, other], - }, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "and")] - And, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::And - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - use pretty_assertions::assert_eq; - - use crate::{ - comparison_column::ColumnSelector, BinaryComparisonOperator, ComparisonColumn, - ComparisonValue, - }; - - use super::Expression; - - #[test] - fn serialize_apply_binary_comparison() -> Result<(), anyhow::Error> { - let input = Expression::ApplyBinaryComparison { - column: ComparisonColumn { - column_type: 
"string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None, - }, - operator: BinaryComparisonOperator::Equal, - value: ComparisonValue::ScalarValueComparison { - value: serde_json::json!("One"), - value_type: "string".to_owned(), - }, - }; - assert_eq!( - to_bson(&input)?, - bson!({ - "type": "binary_op", - "column": {"column_type": "string", "name": "title"}, - "operator": "equal", - "value": {"type": "scalar", "value": "One", "value_type": "string"} - }) - ); - Ok(()) - } - - #[test] - fn parses_apply_binary_comparison() -> Result<(), anyhow::Error> { - let input = bson!({ - "type": "binary_op", - "column": {"column_type": "string", "name": "title"}, - "operator": "equal", - "value": {"type": "scalar", "value": "One", "value_type": "string"} - }); - assert_eq!( - from_bson::(input)?, - Expression::ApplyBinaryComparison { - column: ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::new("title".to_owned()), - path: None, - }, - operator: BinaryComparisonOperator::Equal, - value: ComparisonValue::ScalarValueComparison { - value: serde_json::json!("One"), - value_type: "string".to_owned(), - }, - } - ); - Ok(()) - } - - fn sample_expressions() -> (Expression, Expression, Expression) { - ( - Expression::ApplyBinaryComparison { - column: ComparisonColumn { - column_type: "int".to_owned(), - name: ColumnSelector::Column("age".to_owned()), - path: None, - }, - operator: BinaryComparisonOperator::GreaterThan, - value: ComparisonValue::ScalarValueComparison { - value: 25.into(), - value_type: "int".to_owned(), - }, - }, - Expression::ApplyBinaryComparison { - column: ComparisonColumn { - column_type: "string".to_owned(), - name: ColumnSelector::Column("location".to_owned()), - path: None, - }, - operator: BinaryComparisonOperator::Equal, - value: ComparisonValue::ScalarValueComparison { - value: "US".into(), - value_type: "string".to_owned(), - }, - }, - Expression::ApplyBinaryComparison { - column: ComparisonColumn { - 
column_type: "int".to_owned(), - name: ColumnSelector::Column("group_id".to_owned()), - path: None, - }, - operator: BinaryComparisonOperator::Equal, - value: ComparisonValue::ScalarValueComparison { - value: 4.into(), - value_type: "int".to_owned(), - }, - }, - ) - } - - #[test] - fn and_merges_with_existing_and_expression() { - let (a, b, c) = sample_expressions(); - let other = Expression::And { - expressions: vec![a.clone(), b.clone()], - }; - let expected = Expression::And { - expressions: vec![a, b, c.clone()], - }; - let actual = c.and(other); - assert_eq!(actual, expected); - } - - #[test] - fn and_combines_existing_expression_using_operator() { - let (a, b, c) = sample_expressions(); - let other = Expression::Or { - expressions: vec![a.clone(), b.clone()], - }; - let expected = Expression::And { - expressions: vec![ - c.clone(), - Expression::Or { - expressions: vec![a, b], - }, - ], - }; - let actual = c.and(other); - assert_eq!(actual, expected); - } -} diff --git a/crates/dc-api-types/src/field.rs b/crates/dc-api-types/src/field.rs deleted file mode 100644 index c9f48e76..00000000 --- a/crates/dc-api-types/src/field.rs +++ /dev/null @@ -1,61 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -use super::OrderBy; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum Field { - #[serde(rename = "column")] - Column { - #[serde(rename = "column")] - column: String, - #[serde(rename = "column_type")] - column_type: String, - }, - #[serde(rename = "object")] - NestedObject { - #[serde(rename = "column")] - column: String, - #[serde(rename = "query")] - query: Box, - }, - #[serde(rename = "array")] - NestedArray { - field: Box, - limit: Option, - offset: Option, - #[serde(rename = "where")] - 
r#where: Option, - }, - #[serde(rename = "relationship")] - Relationship { - #[serde(rename = "query")] - query: Box, - /// The name of the relationship to follow for the subquery - #[serde(rename = "relationship")] - relationship: String, - }, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/graph_ql_type.rs b/crates/dc-api-types/src/graph_ql_type.rs deleted file mode 100644 index 6bfbab23..00000000 --- a/crates/dc-api-types/src/graph_ql_type.rs +++ /dev/null @@ -1,44 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -/// -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum GraphQlType { - #[serde(rename = "Int")] - Int, - #[serde(rename = "Float")] - Float, - #[serde(rename = "String")] - String, - #[serde(rename = "Boolean")] - Boolean, - #[serde(rename = "ID")] - Id, -} - -impl ToString for GraphQlType { - fn to_string(&self) -> String { - match self { - Self::Int => String::from("Int"), - Self::Float => String::from("Float"), - Self::String => String::from("String"), - Self::Boolean => String::from("Boolean"), - Self::Id => String::from("ID"), - } - } -} - -impl Default for GraphQlType { - fn default() -> GraphQlType { - Self::Int - } -} diff --git a/crates/dc-api-types/src/graphql_name.rs b/crates/dc-api-types/src/graphql_name.rs deleted file mode 100644 index 5d6630be..00000000 --- a/crates/dc-api-types/src/graphql_name.rs +++ /dev/null @@ -1,260 +0,0 @@ -use std::{borrow::Cow, fmt::Display}; - -use once_cell::sync::Lazy; -use regex::{Captures, Regex, 
Replacer}; -use serde::{Deserialize, Serialize}; - -/// MongoDB identifiers (field names, collection names) can contain characters that are not valid -/// in GraphQL identifiers. These mappings provide GraphQL-safe escape sequences that can be -/// reversed to recover the original MongoDB identifiers. -/// -/// CHANGES TO THIS MAPPING ARE API-BREAKING. -/// -/// Maps from regular expressions to replacement sequences. -/// -/// For invalid characters that do not have mappings here the fallback escape sequence is -/// `__u123D__` where `123D` is replaced with the Unicode codepoint of the escaped character. -/// -/// Input sequences of `__` are a special case that are escaped as `____`. -const GRAPHQL_ESCAPE_SEQUENCES: [(char, &str); 2] = [('.', "__dot__"), ('$', "__dollar__")]; - -/// Make a valid GraphQL name from a string that might contain characters that are not valid in -/// that context. Replaces invalid characters with escape sequences so that the original name can -/// be recovered by reversing the escapes. -/// -/// From conversions from string types automatically apply escapes to maintain the invariant that -/// a GqlName is a valid GraphQL name. BUT conversions to strings do not automatically reverse -/// those escape sequences. To recover the original, unescaped name use GqlName::unescape. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] -#[serde(transparent)] -pub struct GqlName<'a>(Cow<'a, str>); - -/// Alias for owned case of GraphQLId -pub type GraphQLName = GqlName<'static>; - -impl<'a> GqlName<'a> { - pub fn from_trusted_safe_string(name: String) -> GraphQLName { - GqlName(name.into()) - } - - pub fn from_trusted_safe_str(name: &str) -> GqlName<'_> { - GqlName(name.into()) - } - - /// Replace invalid characters in the given string with escape sequences that are safe in - /// GraphQL names. - pub fn escape(name: &str) -> GqlName<'_> { - // Matches characters that are not alphanumeric or underscores. 
For the first character of - // the name the expression is more strict: it does not allow numbers. - // - // In addition to invalid characters, this expression replaces sequences of two - // underscores. We are using two underscores to begin escape sequences, so we need to - // escape those too. - static INVALID_SEQUENCES: Lazy = - Lazy::new(|| Regex::new(r"(?:^[^_A-Za-z])|[^_0-9A-Za-z]|__").unwrap()); - - let replacement = - INVALID_SEQUENCES.replace_all(name, |captures: &Captures| -> Cow<'static, str> { - let sequence = &captures[0]; - if sequence == "__" { - return Cow::from("____"); - } - let char = sequence - .chars() - .next() - .expect("invalid sequence contains a charecter"); - match GRAPHQL_ESCAPE_SEQUENCES - .into_iter() - .find(|(invalid_char, _)| char == *invalid_char) - { - Some((_, replacement)) => Cow::from(replacement), - None => Cow::Owned(format!("__u{:X}__", char as u32)), - } - }); - - GqlName(replacement) - } - - /// Replace escape sequences to recover the original name. - pub fn unescape(self) -> Cow<'a, str> { - static ESCAPE_SEQUENCE_EXPRESSIONS: Lazy = Lazy::new(|| { - let sequences = GRAPHQL_ESCAPE_SEQUENCES.into_iter().map(|(_, seq)| seq); - Regex::new(&format!( - r"(?____)|__u(?[0-9A-F]{{1,8}})__|{}", - itertools::join(sequences, "|") - )) - .unwrap() - }); - ESCAPE_SEQUENCE_EXPRESSIONS.replace_all_cow(self.0, |captures: &Captures| { - if captures.name("underscores").is_some() { - "__".to_owned() - } else if let Some(code_str) = captures.name("codepoint") { - let code = u32::from_str_radix(code_str.as_str(), 16) - .expect("parsing a sequence of 1-8 digits shouldn't fail"); - char::from_u32(code).unwrap().to_string() - } else { - let (invalid_char, _) = GRAPHQL_ESCAPE_SEQUENCES - .into_iter() - .find(|(_, seq)| *seq == &captures[0]) - .unwrap(); - invalid_char.to_string() - } - }) - } - - pub fn as_str(&self) -> &str { - self.0.as_ref() - } - - /// Clones underlying string only if it's borrowed. 
- pub fn into_owned(self) -> GraphQLName { - GqlName(Cow::Owned(self.0.into_owned())) - } -} - -impl From for GqlName<'static> { - fn from(value: String) -> Self { - let inner = match GqlName::escape(&value).0 { - // If we have a borrowed value then no replacements were made so we can grab the - // original string instead of allocating a new one. - Cow::Borrowed(_) => value, - Cow::Owned(s) => s, - }; - GqlName(Cow::Owned(inner)) - } -} - -impl<'a> From<&'a String> for GqlName<'a> { - fn from(value: &'a String) -> Self { - GqlName::escape(value) - } -} - -impl<'a> From<&'a str> for GqlName<'a> { - fn from(value: &'a str) -> Self { - GqlName::escape(value) - } -} - -impl<'a> Display for GqlName<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} - -impl<'a> From> for String { - fn from(value: GqlName<'a>) -> Self { - value.0.into_owned() - } -} - -impl<'a, 'b> From<&'b GqlName<'a>> for &'b str { - fn from(value: &'b GqlName<'a>) -> Self { - &value.0 - } -} - -/// Extension methods for `Regex` that operate on `Cow` instead of `&str`. Avoids allocating -/// new strings on chains of multiple replace calls if no replacements were made. -/// See https://github.com/rust-lang/regex/issues/676#issuecomment-1328973183 -trait RegexCowExt { - /// [`Regex::replace`], but taking text as `Cow` instead of `&str`. - fn replace_cow<'t, R: Replacer>(&self, text: Cow<'t, str>, rep: R) -> Cow<'t, str>; - - /// [`Regex::replace_all`], but taking text as `Cow` instead of `&str`. - fn replace_all_cow<'t, R: Replacer>(&self, text: Cow<'t, str>, rep: R) -> Cow<'t, str>; - - /// [`Regex::replacen`], but taking text as `Cow` instead of `&str`. 
- fn replacen_cow<'t, R: Replacer>( - &self, - text: Cow<'t, str>, - limit: usize, - rep: R, - ) -> Cow<'t, str>; -} - -impl RegexCowExt for Regex { - fn replace_cow<'t, R: Replacer>(&self, text: Cow<'t, str>, rep: R) -> Cow<'t, str> { - match self.replace(&text, rep) { - Cow::Owned(result) => Cow::Owned(result), - Cow::Borrowed(_) => text, - } - } - - fn replace_all_cow<'t, R: Replacer>(&self, text: Cow<'t, str>, rep: R) -> Cow<'t, str> { - match self.replace_all(&text, rep) { - Cow::Owned(result) => Cow::Owned(result), - Cow::Borrowed(_) => text, - } - } - - fn replacen_cow<'t, R: Replacer>( - &self, - text: Cow<'t, str>, - limit: usize, - rep: R, - ) -> Cow<'t, str> { - match self.replacen(&text, limit, rep) { - Cow::Owned(result) => Cow::Owned(result), - Cow::Borrowed(_) => text, - } - } -} - -#[cfg(test)] -mod tests { - use super::GqlName; - - use pretty_assertions::assert_eq; - - fn assert_escapes(input: &str, expected: &str) { - let id = GqlName::from(input); - assert_eq!(id.as_str(), expected); - assert_eq!(id.unescape(), input); - } - - #[test] - fn escapes_invalid_characters() { - assert_escapes( - "system.buckets.time_series", - "system__dot__buckets__dot__time_series", - ); - } - - #[test] - fn escapes_runs_of_underscores() { - assert_escapes("a_____b", "a_________b"); - } - - #[test] - fn escapes_invalid_with_no_predefined_mapping() { - assert_escapes("ascii_!", "ascii___u21__"); - assert_escapes("friends♥", "friends__u2665__"); - assert_escapes("👨‍👩‍👧", "__u1F468____u200D____u1F469____u200D____u1F467__"); - } - - #[test] - fn respects_words_that_appear_in_escape_sequences() { - assert_escapes("a.dot__", "a__dot__dot____"); - assert_escapes("a.dollar__dot", "a__dot__dollar____dot"); - } - - #[test] - fn does_not_escape_input_when_deserializing() -> Result<(), anyhow::Error> { - let input = r#""some__name""#; - let actual = serde_json::from_str::(input)?; - assert_eq!(actual.as_str(), "some__name"); - Ok(()) - } - - #[test] - fn 
does_not_unescape_input_when_serializing() -> Result<(), anyhow::Error> { - let output = GqlName::from("system.buckets.time_series"); - let actual = serde_json::to_string(&output)?; - assert_eq!( - actual.as_str(), - r#""system__dot__buckets__dot__time_series""# - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/insert_capabilities.rs b/crates/dc-api-types/src/insert_capabilities.rs deleted file mode 100644 index 3dd17949..00000000 --- a/crates/dc-api-types/src/insert_capabilities.rs +++ /dev/null @@ -1,29 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct InsertCapabilities { - /// Whether or not nested inserts to related tables are supported - #[serde( - rename = "supports_nested_inserts", - skip_serializing_if = "Option::is_none" - )] - pub supports_nested_inserts: Option, -} - -impl InsertCapabilities { - pub fn new() -> InsertCapabilities { - InsertCapabilities { - supports_nested_inserts: None, - } - } -} diff --git a/crates/dc-api-types/src/insert_field_schema.rs b/crates/dc-api-types/src/insert_field_schema.rs deleted file mode 100644 index eb86822e..00000000 --- a/crates/dc-api-types/src/insert_field_schema.rs +++ /dev/null @@ -1,56 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum InsertFieldSchema { - #[serde(rename = "array_relation")] - ArrayRelation { - /// The name of the array relationship over which the related rows must be 
inserted - #[serde(rename = "relationship")] - relationship: String, - }, - #[serde(rename = "column")] - Column { - /// The name of the column that this field should be inserted into - #[serde(rename = "column")] - column: String, - #[serde(rename = "column_type")] - column_type: String, - /// Is the column nullable - #[serde(rename = "nullable")] - nullable: bool, - #[serde(rename = "value_generated", skip_serializing_if = "Option::is_none")] - value_generated: Option>, - }, - #[serde(rename = "object_relation")] - ObjectRelation { - #[serde(rename = "insertion_order")] - insertion_order: crate::ObjectRelationInsertionOrder, - /// The name of the object relationship over which the related row must be inserted - #[serde(rename = "relationship")] - relationship: String, - }, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/insert_mutation_operation.rs b/crates/dc-api-types/src/insert_mutation_operation.rs deleted file mode 100644 index 44b2b0ae..00000000 --- a/crates/dc-api-types/src/insert_mutation_operation.rs +++ /dev/null @@ -1,62 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct InsertMutationOperation { - #[serde(rename = "post_insert_check", skip_serializing_if = "Option::is_none")] - pub post_insert_check: Option>, - /// The fields to return for the rows affected by this insert operation - #[serde( - rename = "returning_fields", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = 
"Option::is_none" - )] - pub returning_fields: Option>>, - /// The rows to insert into the table - #[serde(rename = "rows")] - pub rows: Vec<::std::collections::HashMap>, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - pub table: Vec, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl InsertMutationOperation { - pub fn new( - rows: Vec<::std::collections::HashMap>, - table: Vec, - r#type: RHashType, - ) -> InsertMutationOperation { - InsertMutationOperation { - post_insert_check: None, - returning_fields: None, - rows, - table, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "insert")] - Insert, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Insert - } -} diff --git a/crates/dc-api-types/src/lib.rs b/crates/dc-api-types/src/lib.rs deleted file mode 100644 index 04de9b21..00000000 --- a/crates/dc-api-types/src/lib.rs +++ /dev/null @@ -1,199 +0,0 @@ -pub mod aggregate; -pub use self::aggregate::Aggregate; -pub mod and_expression; -pub use self::and_expression::AndExpression; -pub mod another_column_comparison; -pub use self::another_column_comparison::AnotherColumnComparison; -pub mod apply_binary_array_comparison_operator; -pub use self::apply_binary_array_comparison_operator::ApplyBinaryArrayComparisonOperator; -pub mod apply_binary_comparison_operator; -pub use self::apply_binary_comparison_operator::ApplyBinaryComparisonOperator; -pub mod apply_unary_comparison_operator; -pub use self::apply_unary_comparison_operator::ApplyUnaryComparisonOperator; -pub mod array_comparison_value; -pub use self::array_comparison_value::ArrayComparisonValue; -pub mod array_relation_insert_schema; -pub use self::array_relation_insert_schema::ArrayRelationInsertSchema; -pub mod 
atomicity_support_level; -pub use self::atomicity_support_level::AtomicitySupportLevel; -pub mod auto_increment_generation_strategy; -pub use self::auto_increment_generation_strategy::AutoIncrementGenerationStrategy; -pub mod binary_array_comparison_operator; -pub use self::binary_array_comparison_operator::BinaryArrayComparisonOperator; -pub mod binary_comparison_operator; -pub use self::binary_comparison_operator::BinaryComparisonOperator; -pub mod capabilities; -pub use self::capabilities::Capabilities; -pub mod capabilities_response; -pub use self::capabilities_response::CapabilitiesResponse; -pub mod column_count_aggregate; -pub use self::column_count_aggregate::ColumnCountAggregate; -pub mod column_field; -pub use self::column_field::ColumnField; -pub mod column_info; -pub use self::column_info::ColumnInfo; -pub mod column_type; -pub use self::column_type::ColumnType; -pub mod column_insert_schema; -pub use self::column_insert_schema::ColumnInsertSchema; -pub mod column_nullability; -pub use self::column_nullability::ColumnNullability; -pub mod column_value_generation_strategy; -pub use self::column_value_generation_strategy::ColumnValueGenerationStrategy; -pub mod comparison_capabilities; -pub use self::comparison_capabilities::ComparisonCapabilities; -pub mod comparison_column; -pub use self::comparison_column::{ColumnSelector, ComparisonColumn}; -pub mod comparison_value; -pub use self::comparison_value::ComparisonValue; -pub mod config_schema_response; -pub use self::config_schema_response::ConfigSchemaResponse; -pub mod constraint; -pub use self::constraint::Constraint; -pub mod custom_update_column_operator_row_update; -pub use self::custom_update_column_operator_row_update::CustomUpdateColumnOperatorRowUpdate; -pub mod data_schema_capabilities; -pub use self::data_schema_capabilities::DataSchemaCapabilities; -pub mod dataset_create_clone_request; -pub use self::dataset_create_clone_request::DatasetCreateCloneRequest; -pub mod 
dataset_create_clone_response; -pub use self::dataset_create_clone_response::DatasetCreateCloneResponse; -pub mod dataset_delete_clone_response; -pub use self::dataset_delete_clone_response::DatasetDeleteCloneResponse; -pub mod dataset_get_template_response; -pub use self::dataset_get_template_response::DatasetGetTemplateResponse; -pub mod default_value_generation_strategy; -pub use self::default_value_generation_strategy::DefaultValueGenerationStrategy; -pub mod delete_mutation_operation; -pub use self::delete_mutation_operation::DeleteMutationOperation; -pub mod error_response; -pub use self::error_response::ErrorResponse; -pub mod error_response_type; -pub use self::error_response_type::ErrorResponseType; -pub mod exists_expression; -pub use self::exists_expression::ExistsExpression; -pub mod exists_in_table; -pub use self::exists_in_table::ExistsInTable; -pub mod explain_response; -pub use self::explain_response::ExplainResponse; -pub mod expression; -pub use self::expression::Expression; -pub mod field; -pub use self::field::Field; -pub mod graphql_name; -pub use self::graphql_name::{GqlName, GraphQLName}; -pub mod graph_ql_type; -pub use self::graph_ql_type::GraphQlType; -pub mod insert_capabilities; -pub use self::insert_capabilities::InsertCapabilities; -pub mod insert_field_schema; -pub use self::insert_field_schema::InsertFieldSchema; -pub mod insert_mutation_operation; -pub use self::insert_mutation_operation::InsertMutationOperation; -pub mod mutation_capabilities; -pub use self::mutation_capabilities::MutationCapabilities; -pub mod mutation_operation; -pub use self::mutation_operation::MutationOperation; -pub mod mutation_operation_results; -pub use self::mutation_operation_results::MutationOperationResults; -pub mod mutation_request; -pub use self::mutation_request::MutationRequest; -pub mod mutation_response; -pub use self::mutation_response::MutationResponse; -pub mod nested_object_field; -pub use self::nested_object_field::NestedObjectField; -pub 
mod not_expression; -pub use self::not_expression::NotExpression; -pub mod object_relation_insert_schema; -pub use self::object_relation_insert_schema::ObjectRelationInsertSchema; -pub mod object_relation_insertion_order; -pub use self::object_relation_insertion_order::ObjectRelationInsertionOrder; -pub mod object_type_definition; -pub use self::object_type_definition::ObjectTypeDefinition; -pub mod open_api_discriminator; -pub use self::open_api_discriminator::OpenApiDiscriminator; -pub mod open_api_external_documentation; -pub use self::open_api_external_documentation::OpenApiExternalDocumentation; -pub mod open_api_reference; -pub use self::open_api_reference::OpenApiReference; -pub mod open_api_schema; -pub use self::open_api_schema::OpenApiSchema; -pub use self::open_api_schema::SchemaOrReference; -pub mod open_api_xml; -pub use self::open_api_xml::OpenApiXml; -pub mod or_expression; -pub use self::or_expression::OrExpression; -pub mod order_by; -pub use self::order_by::OrderBy; -pub mod order_by_column; -pub use self::order_by_column::OrderByColumn; -pub mod order_by_element; -pub use self::order_by_element::OrderByElement; -pub mod order_by_relation; -pub use self::order_by_relation::OrderByRelation; -pub mod order_by_single_column_aggregate; -pub use self::order_by_single_column_aggregate::OrderBySingleColumnAggregate; -pub mod order_by_star_count_aggregate; -pub use self::order_by_star_count_aggregate::OrderByStarCountAggregate; -pub mod order_by_target; -pub use self::order_by_target::OrderByTarget; -pub mod order_direction; -pub use self::order_direction::OrderDirection; -pub mod query; -pub use self::query::Query; -pub mod query_capabilities; -pub use self::query_capabilities::QueryCapabilities; -pub mod query_request; -pub use self::query_request::{QueryRequest, VariableSet}; -pub mod query_response; -pub use self::query_response::{QueryResponse, ResponseFieldValue, RowSet}; -pub mod raw_request; -pub use self::raw_request::RawRequest; -pub mod 
raw_response; -pub use self::raw_response::RawResponse; -pub mod related_table; -pub use self::related_table::RelatedTable; -pub mod relationship; -pub use self::relationship::{ColumnMapping, Relationship}; -pub mod relationship_field; -pub use self::relationship_field::RelationshipField; -pub mod relationship_type; -pub use self::relationship_type::RelationshipType; -pub mod row_object_value; -pub use self::row_object_value::RowObjectValue; -pub mod row_update; -pub use self::row_update::RowUpdate; -pub mod scalar_type_capabilities; -pub use self::scalar_type_capabilities::ScalarTypeCapabilities; -pub mod scalar_value; -pub use self::scalar_value::ScalarValue; -pub mod schema_response; -pub use self::schema_response::SchemaResponse; -pub mod set_column_row_update; -pub use self::set_column_row_update::SetColumnRowUpdate; -pub mod single_column_aggregate; -pub use self::single_column_aggregate::SingleColumnAggregate; -pub mod star_count_aggregate; -pub use self::star_count_aggregate::StarCountAggregate; -pub mod subquery_comparison_capabilities; -pub use self::subquery_comparison_capabilities::SubqueryComparisonCapabilities; -pub mod table_info; -pub use self::table_info::TableInfo; -pub mod table_insert_schema; -pub use self::table_insert_schema::TableInsertSchema; -pub mod table_relationships; -pub use self::table_relationships::TableRelationships; -pub mod table_type; -pub use self::table_type::TableType; -pub mod target; -pub use self::target::{Argument, Target}; -pub mod unary_comparison_operator; -pub use self::unary_comparison_operator::UnaryComparisonOperator; -pub mod unique_identifier_generation_strategy; -pub use self::unique_identifier_generation_strategy::UniqueIdentifierGenerationStrategy; -pub mod unrelated_table; -pub use self::unrelated_table::UnrelatedTable; -pub mod update_column_operator_definition; -pub use self::update_column_operator_definition::UpdateColumnOperatorDefinition; -pub mod update_mutation_operation; -pub use 
self::update_mutation_operation::UpdateMutationOperation; diff --git a/crates/dc-api-types/src/mutation_capabilities.rs b/crates/dc-api-types/src/mutation_capabilities.rs deleted file mode 100644 index fd987967..00000000 --- a/crates/dc-api-types/src/mutation_capabilities.rs +++ /dev/null @@ -1,55 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct MutationCapabilities { - #[serde( - rename = "atomicity_support_level", - skip_serializing_if = "Option::is_none" - )] - pub atomicity_support_level: Option, - #[serde( - rename = "delete", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub delete: Option>, - #[serde(rename = "insert", skip_serializing_if = "Option::is_none")] - pub insert: Option>, - #[serde( - rename = "returning", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub returning: Option>, - #[serde( - rename = "update", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub update: Option>, -} - -impl MutationCapabilities { - pub fn new() -> MutationCapabilities { - MutationCapabilities { - atomicity_support_level: None, - delete: None, - insert: None, - returning: None, - update: None, - } - } -} diff --git a/crates/dc-api-types/src/mutation_operation.rs b/crates/dc-api-types/src/mutation_operation.rs deleted file mode 100644 index 09689a36..00000000 --- a/crates/dc-api-types/src/mutation_operation.rs +++ /dev/null @@ -1,70 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI 
document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum MutationOperation { - #[serde(rename = "delete")] - Delete { - /// The fields to return for the rows affected by this delete operation - #[serde(rename = "returning_fields", skip_serializing_if = "Option::is_none")] - returning_fields: Option<::std::collections::HashMap>, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - table: Vec, - #[serde(rename = "where", skip_serializing_if = "Option::is_none")] - r#where: Option>, - }, - #[serde(rename = "insert")] - Insert { - #[serde(rename = "post_insert_check", skip_serializing_if = "Option::is_none")] - post_insert_check: Option>, - /// The fields to return for the rows affected by this insert operation - #[serde(rename = "returning_fields", skip_serializing_if = "Option::is_none")] - returning_fields: Option<::std::collections::HashMap>, - /// The rows to insert into the table - #[serde(rename = "rows")] - rows: Vec<::std::collections::HashMap>, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - table: Vec, - }, - #[serde(rename = "update")] - Update { - #[serde(rename = "post_update_check", skip_serializing_if = "Option::is_none")] - post_update_check: Option>, - /// The fields to return for the rows affected by this update operation - #[serde(rename = "returning_fields", skip_serializing_if = "Option::is_none")] - returning_fields: Option<::std::collections::HashMap>, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename 
= "table")] - table: Vec, - /// The updates to make to the matched rows in the table - #[serde(rename = "updates")] - updates: Vec, - #[serde(rename = "where", skip_serializing_if = "Option::is_none")] - r#where: Option>, - }, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "update")] - Update, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Update - } -} diff --git a/crates/dc-api-types/src/mutation_operation_results.rs b/crates/dc-api-types/src/mutation_operation_results.rs deleted file mode 100644 index 973bb065..00000000 --- a/crates/dc-api-types/src/mutation_operation_results.rs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use ::std::collections::HashMap; - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct MutationOperationResults { - /// The number of rows affected by the mutation operation - #[serde(rename = "affected_rows")] - pub affected_rows: f32, - /// The rows affected by the mutation operation - #[serde( - rename = "returning", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub returning: Option>>>, -} - -impl MutationOperationResults { - pub fn new(affected_rows: f32) -> MutationOperationResults { - MutationOperationResults { - affected_rows, - returning: None, - } - } -} diff --git a/crates/dc-api-types/src/mutation_request.rs b/crates/dc-api-types/src/mutation_request.rs deleted file mode 100644 index 2443fd4d..00000000 --- a/crates/dc-api-types/src/mutation_request.rs +++ /dev/null @@ -1,38 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator 
https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct MutationRequest { - /// The schema by which to interpret row data specified in any insert operations in this request - #[serde(rename = "insert_schema")] - pub insert_schema: Vec, - /// The mutation operations to perform - #[serde(rename = "operations")] - pub operations: Vec, - /// The relationships between tables involved in the entire mutation request - #[serde(rename = "relationships", alias = "table_relationships")] - pub relationships: Vec, -} - -impl MutationRequest { - pub fn new( - insert_schema: Vec, - operations: Vec, - relationships: Vec, - ) -> MutationRequest { - MutationRequest { - insert_schema, - operations, - relationships, - } - } -} diff --git a/crates/dc-api-types/src/mutation_response.rs b/crates/dc-api-types/src/mutation_response.rs deleted file mode 100644 index ed72ccc8..00000000 --- a/crates/dc-api-types/src/mutation_response.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct MutationResponse { - /// The results of each mutation operation, in the same order as they were received - #[serde(rename = "operation_results")] - pub operation_results: Vec, -} - -impl MutationResponse { - pub fn new(operation_results: Vec) -> MutationResponse { - MutationResponse { operation_results } - } -} diff --git a/crates/dc-api-types/src/nested_object_field.rs b/crates/dc-api-types/src/nested_object_field.rs deleted file mode 100644 index 0be0bf26..00000000 --- 
a/crates/dc-api-types/src/nested_object_field.rs +++ /dev/null @@ -1,44 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct NestedObjectField { - #[serde(rename = "column")] - pub column: String, - #[serde(rename = "query")] - pub query: Box, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl NestedObjectField { - pub fn new(column: String, query: crate::Query, r#type: RHashType) -> NestedObjectField { - NestedObjectField { - column, - query: Box::new(query), - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "object")] - Object, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Object - } -} diff --git a/crates/dc-api-types/src/not_expression.rs b/crates/dc-api-types/src/not_expression.rs deleted file mode 100644 index 4dae04f9..00000000 --- a/crates/dc-api-types/src/not_expression.rs +++ /dev/null @@ -1,41 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct NotExpression { - #[serde(rename = "expression")] - pub expression: Box, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl NotExpression { - pub fn new(expression: crate::Expression, r#type: RHashType) -> NotExpression { - NotExpression { - expression: Box::new(expression), - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, 
Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "not")] - Not, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Not - } -} diff --git a/crates/dc-api-types/src/object_relation_insert_schema.rs b/crates/dc-api-types/src/object_relation_insert_schema.rs deleted file mode 100644 index 377aeeaf..00000000 --- a/crates/dc-api-types/src/object_relation_insert_schema.rs +++ /dev/null @@ -1,49 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ObjectRelationInsertSchema { - #[serde(rename = "insertion_order")] - pub insertion_order: crate::ObjectRelationInsertionOrder, - /// The name of the object relationship over which the related row must be inserted - #[serde(rename = "relationship")] - pub relationship: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl ObjectRelationInsertSchema { - pub fn new( - insertion_order: crate::ObjectRelationInsertionOrder, - relationship: String, - r#type: RHashType, - ) -> ObjectRelationInsertSchema { - ObjectRelationInsertSchema { - insertion_order, - relationship, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "object_relation")] - ObjectRelation, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::ObjectRelation - } -} diff --git a/crates/dc-api-types/src/object_relation_insertion_order.rs b/crates/dc-api-types/src/object_relation_insertion_order.rs deleted file mode 100644 index e18368ed..00000000 --- a/crates/dc-api-types/src/object_relation_insertion_order.rs +++ /dev/null @@ -1,35 +0,0 @@ -/* - * - * - * No description provided 
(generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -/// -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum ObjectRelationInsertionOrder { - #[serde(rename = "before_parent")] - BeforeParent, - #[serde(rename = "after_parent")] - AfterParent, -} - -impl ToString for ObjectRelationInsertionOrder { - fn to_string(&self) -> String { - match self { - Self::BeforeParent => String::from("before_parent"), - Self::AfterParent => String::from("after_parent"), - } - } -} - -impl Default for ObjectRelationInsertionOrder { - fn default() -> ObjectRelationInsertionOrder { - Self::BeforeParent - } -} diff --git a/crates/dc-api-types/src/object_type_definition.rs b/crates/dc-api-types/src/object_type_definition.rs deleted file mode 100644 index e4f92a43..00000000 --- a/crates/dc-api-types/src/object_type_definition.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -use crate::GraphQLName; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ObjectTypeDefinition { - /// The columns of the type - #[serde(rename = "columns")] - pub columns: Vec, - /// The description of the type - #[serde(rename = "description", skip_serializing_if = "Option::is_none")] - pub description: Option, - /// The name of the type - #[serde(rename = "name")] - pub name: GraphQLName, -} - -impl ObjectTypeDefinition { - pub fn new(columns: Vec, name: GraphQLName) -> ObjectTypeDefinition { - ObjectTypeDefinition { - columns, - description: None, - name, - } - } -} diff --git 
a/crates/dc-api-types/src/open_api_discriminator.rs b/crates/dc-api-types/src/open_api_discriminator.rs deleted file mode 100644 index d271b20c..00000000 --- a/crates/dc-api-types/src/open_api_discriminator.rs +++ /dev/null @@ -1,28 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OpenApiDiscriminator { - #[serde(rename = "mapping", skip_serializing_if = "Option::is_none")] - pub mapping: Option<::std::collections::HashMap>, - #[serde(rename = "propertyName")] - pub property_name: String, -} - -impl OpenApiDiscriminator { - pub fn new(property_name: String) -> OpenApiDiscriminator { - OpenApiDiscriminator { - mapping: None, - property_name, - } - } -} diff --git a/crates/dc-api-types/src/open_api_external_documentation.rs b/crates/dc-api-types/src/open_api_external_documentation.rs deleted file mode 100644 index 79b39b26..00000000 --- a/crates/dc-api-types/src/open_api_external_documentation.rs +++ /dev/null @@ -1,28 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OpenApiExternalDocumentation { - #[serde(rename = "description", skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(rename = "url")] - pub url: String, -} - -impl OpenApiExternalDocumentation { - pub fn new(url: String) -> OpenApiExternalDocumentation { - OpenApiExternalDocumentation { - description: None, - url, - } - } -} diff --git 
a/crates/dc-api-types/src/open_api_reference.rs b/crates/dc-api-types/src/open_api_reference.rs deleted file mode 100644 index fb98b391..00000000 --- a/crates/dc-api-types/src/open_api_reference.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OpenApiReference { - #[serde(rename = "$ref")] - pub dollar_ref: String, -} - -impl OpenApiReference { - pub fn new(dollar_ref: String) -> OpenApiReference { - OpenApiReference { dollar_ref } - } -} diff --git a/crates/dc-api-types/src/open_api_schema.rs b/crates/dc-api-types/src/open_api_schema.rs deleted file mode 100644 index a3962ea8..00000000 --- a/crates/dc-api-types/src/open_api_schema.rs +++ /dev/null @@ -1,172 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -use super::OpenApiReference; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OpenApiSchema { - #[serde( - rename = "additionalProperties", - skip_serializing_if = "Option::is_none" - )] - pub additional_properties: Option<::std::collections::HashMap>, - #[serde(rename = "allOf", skip_serializing_if = "Option::is_none")] - pub all_of: Option>, - #[serde(rename = "anyOf", skip_serializing_if = "Option::is_none")] - pub any_of: Option>, - #[serde( - rename = "default", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub default: Option>, - #[serde(rename = "deprecated", skip_serializing_if = "Option::is_none")] - pub 
deprecated: Option, - #[serde(rename = "description", skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(rename = "discriminator", skip_serializing_if = "Option::is_none")] - pub discriminator: Option>, - #[serde(rename = "enum", skip_serializing_if = "Option::is_none")] - pub r#enum: Option>, - #[serde( - rename = "example", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub example: Option>, - #[serde(rename = "exclusiveMaximum", skip_serializing_if = "Option::is_none")] - pub exclusive_maximum: Option, - #[serde(rename = "exclusiveMinimum", skip_serializing_if = "Option::is_none")] - pub exclusive_minimum: Option, - #[serde(rename = "externalDocs", skip_serializing_if = "Option::is_none")] - pub external_docs: Option>, - #[serde(rename = "format", skip_serializing_if = "Option::is_none")] - pub format: Option, - #[serde(rename = "items", skip_serializing_if = "Option::is_none")] - pub items: Option>, - #[serde(rename = "maxItems", skip_serializing_if = "Option::is_none")] - pub max_items: Option, - #[serde(rename = "maxLength", skip_serializing_if = "Option::is_none")] - pub max_length: Option, - #[serde(rename = "maxProperties", skip_serializing_if = "Option::is_none")] - pub max_properties: Option, - #[serde(rename = "maximum", skip_serializing_if = "Option::is_none")] - pub maximum: Option, - #[serde(rename = "minItems", skip_serializing_if = "Option::is_none")] - pub min_items: Option, - #[serde(rename = "minLength", skip_serializing_if = "Option::is_none")] - pub min_length: Option, - #[serde(rename = "minProperties", skip_serializing_if = "Option::is_none")] - pub min_properties: Option, - #[serde(rename = "minimum", skip_serializing_if = "Option::is_none")] - pub minimum: Option, - #[serde(rename = "multipleOf", skip_serializing_if = "Option::is_none")] - pub multiple_of: Option, - #[serde(rename = "not", skip_serializing_if = "Option::is_none")] - pub not: Option>, - 
#[serde(rename = "nullable", skip_serializing_if = "Option::is_none")] - pub nullable: Option, - #[serde(rename = "oneOf", skip_serializing_if = "Option::is_none")] - pub one_of: Option>, - #[serde(rename = "pattern", skip_serializing_if = "Option::is_none")] - pub pattern: Option, - #[serde(rename = "properties", skip_serializing_if = "Option::is_none")] - pub properties: Option<::std::collections::HashMap>, - #[serde(rename = "readOnly", skip_serializing_if = "Option::is_none")] - pub read_only: Option, - #[serde(rename = "required", skip_serializing_if = "Option::is_none")] - pub required: Option>, - #[serde(rename = "title", skip_serializing_if = "Option::is_none")] - pub title: Option, - #[serde(rename = "type", skip_serializing_if = "Option::is_none")] - pub r#type: Option, - #[serde(rename = "uniqueItems", skip_serializing_if = "Option::is_none")] - pub unique_items: Option, - #[serde(rename = "writeOnly", skip_serializing_if = "Option::is_none")] - pub write_only: Option, - #[serde(rename = "xml", skip_serializing_if = "Option::is_none")] - pub xml: Option>, -} - -impl OpenApiSchema { - pub fn new() -> OpenApiSchema { - OpenApiSchema { - additional_properties: None, - all_of: None, - any_of: None, - default: None, - deprecated: None, - description: None, - discriminator: None, - r#enum: None, - example: None, - exclusive_maximum: None, - exclusive_minimum: None, - external_docs: None, - format: None, - items: None, - max_items: None, - max_length: None, - max_properties: None, - maximum: None, - min_items: None, - min_length: None, - min_properties: None, - minimum: None, - multiple_of: None, - not: None, - nullable: None, - one_of: None, - pattern: None, - properties: None, - read_only: None, - required: None, - title: None, - r#type: None, - unique_items: None, - write_only: None, - xml: None, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "array")] - 
Array, - #[serde(rename = "boolean")] - Boolean, - #[serde(rename = "integer")] - Integer, - #[serde(rename = "number")] - Number, - #[serde(rename = "object")] - Object, - #[serde(rename = "string")] - String, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Array - } -} - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum SchemaOrReference { - OpenApiSchema(OpenApiSchema), - OpenApiReference(OpenApiReference), -} diff --git a/crates/dc-api-types/src/open_api_xml.rs b/crates/dc-api-types/src/open_api_xml.rs deleted file mode 100644 index 57075e04..00000000 --- a/crates/dc-api-types/src/open_api_xml.rs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OpenApiXml { - #[serde(rename = "attribute", skip_serializing_if = "Option::is_none")] - pub attribute: Option, - #[serde(rename = "name", skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(rename = "namespace", skip_serializing_if = "Option::is_none")] - pub namespace: Option, - #[serde(rename = "prefix", skip_serializing_if = "Option::is_none")] - pub prefix: Option, - #[serde(rename = "wrapped", skip_serializing_if = "Option::is_none")] - pub wrapped: Option, -} - -impl OpenApiXml { - pub fn new() -> OpenApiXml { - OpenApiXml { - attribute: None, - name: None, - namespace: None, - prefix: None, - wrapped: None, - } - } -} diff --git a/crates/dc-api-types/src/or_expression.rs b/crates/dc-api-types/src/or_expression.rs deleted file mode 100644 index c148e269..00000000 --- a/crates/dc-api-types/src/or_expression.rs +++ /dev/null @@ -1,41 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi 
Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OrExpression { - #[serde(rename = "expressions")] - pub expressions: Vec, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl OrExpression { - pub fn new(expressions: Vec, r#type: RHashType) -> OrExpression { - OrExpression { - expressions, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "or")] - Or, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Or - } -} diff --git a/crates/dc-api-types/src/order_by.rs b/crates/dc-api-types/src/order_by.rs deleted file mode 100644 index 3743673e..00000000 --- a/crates/dc-api-types/src/order_by.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OrderBy { - /// The elements to order by, in priority order - #[serde(rename = "elements")] - pub elements: Vec, - /// A map of relationships from the current query table to target tables. The key of the map is the relationship name. The relationships are used within the order by elements. 
- #[serde(rename = "relations")] - pub relations: ::std::collections::HashMap, -} - -impl OrderBy { - pub fn new( - elements: Vec, - relations: ::std::collections::HashMap, - ) -> OrderBy { - OrderBy { - elements, - relations, - } - } -} diff --git a/crates/dc-api-types/src/order_by_column.rs b/crates/dc-api-types/src/order_by_column.rs deleted file mode 100644 index 562f0e17..00000000 --- a/crates/dc-api-types/src/order_by_column.rs +++ /dev/null @@ -1,38 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OrderByColumn { - #[serde(rename = "column")] - pub column: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl OrderByColumn { - pub fn new(column: String, r#type: RHashType) -> OrderByColumn { - OrderByColumn { column, r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/order_by_element.rs b/crates/dc-api-types/src/order_by_element.rs deleted file mode 100644 index a871837f..00000000 --- a/crates/dc-api-types/src/order_by_element.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct OrderByElement { - #[serde(rename = "order_direction")] - pub order_direction: crate::OrderDirection, 
- #[serde(rename = "target")] - pub target: crate::OrderByTarget, - /// The relationship path from the current query table to the table that contains the target to order by. This is always non-empty for aggregate order by targets - #[serde(rename = "target_path")] - pub target_path: Vec, -} - -impl OrderByElement { - pub fn new( - order_direction: crate::OrderDirection, - target: crate::OrderByTarget, - target_path: Vec, - ) -> OrderByElement { - OrderByElement { - order_direction, - target, - target_path, - } - } -} diff --git a/crates/dc-api-types/src/order_by_relation.rs b/crates/dc-api-types/src/order_by_relation.rs deleted file mode 100644 index 7e6f86ec..00000000 --- a/crates/dc-api-types/src/order_by_relation.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OrderByRelation { - /// Further relationships to follow from the relationship's target table. The key of the map is the relationship name. 
- #[serde(rename = "subrelations")] - pub subrelations: ::std::collections::HashMap, - #[serde(rename = "where", skip_serializing_if = "Option::is_none")] - pub r#where: Option>, -} - -impl OrderByRelation { - pub fn new( - subrelations: ::std::collections::HashMap, - ) -> OrderByRelation { - OrderByRelation { - subrelations, - r#where: None, - } - } -} diff --git a/crates/dc-api-types/src/order_by_single_column_aggregate.rs b/crates/dc-api-types/src/order_by_single_column_aggregate.rs deleted file mode 100644 index 3fbe8d5a..00000000 --- a/crates/dc-api-types/src/order_by_single_column_aggregate.rs +++ /dev/null @@ -1,54 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OrderBySingleColumnAggregate { - /// The column to apply the aggregation function to - #[serde(rename = "column")] - pub column: String, - /// Single column aggregate function name. 
A valid GraphQL name - #[serde(rename = "function")] - pub function: String, - #[serde(rename = "result_type")] - pub result_type: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl OrderBySingleColumnAggregate { - pub fn new( - column: String, - function: String, - result_type: String, - r#type: RHashType, - ) -> OrderBySingleColumnAggregate { - OrderBySingleColumnAggregate { - column, - function, - result_type, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "single_column_aggregate")] - SingleColumnAggregate, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::SingleColumnAggregate - } -} diff --git a/crates/dc-api-types/src/order_by_star_count_aggregate.rs b/crates/dc-api-types/src/order_by_star_count_aggregate.rs deleted file mode 100644 index 5056d1b7..00000000 --- a/crates/dc-api-types/src/order_by_star_count_aggregate.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct OrderByStarCountAggregate { - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl OrderByStarCountAggregate { - pub fn new(r#type: RHashType) -> OrderByStarCountAggregate { - OrderByStarCountAggregate { r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "star_count_aggregate")] - StarCountAggregate, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::StarCountAggregate - } -} diff --git a/crates/dc-api-types/src/order_by_target.rs b/crates/dc-api-types/src/order_by_target.rs 
deleted file mode 100644 index df54b6f0..00000000 --- a/crates/dc-api-types/src/order_by_target.rs +++ /dev/null @@ -1,49 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -use crate::comparison_column::ColumnSelector; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum OrderByTarget { - #[serde(rename = "column")] - Column { - #[serde(rename = "column")] - column: ColumnSelector, - }, - #[serde(rename = "single_column_aggregate")] - SingleColumnAggregate { - /// The column to apply the aggregation function to - #[serde(rename = "column")] - column: String, - /// Single column aggregate function name. A valid GraphQL name - #[serde(rename = "function")] - function: String, - #[serde(rename = "result_type")] - result_type: String, - }, - #[serde(rename = "star_count_aggregate")] - StarCountAggregate {}, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "column")] - Column, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Column - } -} diff --git a/crates/dc-api-types/src/order_direction.rs b/crates/dc-api-types/src/order_direction.rs deleted file mode 100644 index ea4c4bcc..00000000 --- a/crates/dc-api-types/src/order_direction.rs +++ /dev/null @@ -1,35 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -/// -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum OrderDirection { - #[serde(rename = "asc")] - Asc, - 
#[serde(rename = "desc")] - Desc, -} - -impl ToString for OrderDirection { - fn to_string(&self) -> String { - match self { - Self::Asc => String::from("asc"), - Self::Desc => String::from("desc"), - } - } -} - -impl Default for OrderDirection { - fn default() -> OrderDirection { - Self::Asc - } -} diff --git a/crates/dc-api-types/src/query.rs b/crates/dc-api-types/src/query.rs deleted file mode 100644 index 9d106123..00000000 --- a/crates/dc-api-types/src/query.rs +++ /dev/null @@ -1,56 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct Query { - /// Aggregate fields of the query - #[serde( - rename = "aggregates", - default, - skip_serializing_if = "Option::is_none" - )] - pub aggregates: Option<::std::collections::HashMap>, - /// Optionally limit the maximum number of rows considered while applying aggregations. This limit does not apply to returned rows. - #[serde( - rename = "aggregates_limit", - default, - skip_serializing_if = "Option::is_none" - )] - pub aggregates_limit: Option, - /// Fields of the query - #[serde(rename = "fields", default, skip_serializing_if = "Option::is_none")] - pub fields: Option<::std::collections::HashMap>, - /// Optionally limit the maximum number of returned rows. This limit does not apply to records considered while apply aggregations. - #[serde(rename = "limit", default, skip_serializing_if = "Option::is_none")] - pub limit: Option, - /// Optionally offset from the Nth result. This applies to both row and aggregation results. 
- #[serde(rename = "offset", default, skip_serializing_if = "Option::is_none")] - pub offset: Option, - #[serde(rename = "order_by", default, skip_serializing_if = "Option::is_none")] - pub order_by: Option, - #[serde(rename = "where", skip_serializing_if = "Option::is_none")] - pub r#where: Option, -} - -impl Query { - pub fn new() -> Query { - Query { - aggregates: None, - aggregates_limit: None, - fields: None, - limit: None, - offset: None, - order_by: None, - r#where: None, - } - } -} diff --git a/crates/dc-api-types/src/query_capabilities.rs b/crates/dc-api-types/src/query_capabilities.rs deleted file mode 100644 index 6cfb92f5..00000000 --- a/crates/dc-api-types/src/query_capabilities.rs +++ /dev/null @@ -1,30 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct QueryCapabilities { - #[serde( - rename = "foreach", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub foreach: Option>, -} - -impl QueryCapabilities { - pub fn new() -> QueryCapabilities { - QueryCapabilities { - foreach: Some(Some(serde_json::json!({}))), - } - } -} diff --git a/crates/dc-api-types/src/query_request.rs b/crates/dc-api-types/src/query_request.rs deleted file mode 100644 index e70507d7..00000000 --- a/crates/dc-api-types/src/query_request.rs +++ /dev/null @@ -1,66 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use std::collections::BTreeMap; - -use crate::target::target_or_table_name; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, 
Debug, PartialEq, Serialize, Deserialize)] -pub struct QueryRequest { - /// If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query. - #[serde( - rename = "foreach", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub foreach: Option>>>, - - #[serde(rename = "query")] - pub query: Box, - - /// The target of the query. - /// For backwards compatibility with previous versions of dc-api we allow the alternative property name "table" and allow table names to be parsed into Target::TTable - #[serde( - rename = "target", - alias = "table", - deserialize_with = "target_or_table_name" - )] - pub target: crate::Target, - - /// The relationships between tables involved in the entire query request - #[serde(rename = "relationships", alias = "table_relationships")] - pub relationships: Vec, - - /// This field is not part of the v2 DC Agent API - it is included to support queries - /// translated from the v3 NDC API. A query request may include either `foreach` or - /// `variables`, but should not include both. 
- #[serde(skip)] - pub variables: Option>, -} - -pub type VariableSet = BTreeMap; - -impl QueryRequest { - pub fn new( - query: crate::Query, - target: crate::Target, - relationships: Vec, - ) -> QueryRequest { - QueryRequest { - foreach: None, - query: Box::new(query), - target, - relationships, - variables: None, - } - } -} diff --git a/crates/dc-api-types/src/query_response.rs b/crates/dc-api-types/src/query_response.rs deleted file mode 100644 index 0c48d215..00000000 --- a/crates/dc-api-types/src/query_response.rs +++ /dev/null @@ -1,59 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use ::std::collections::HashMap; - -use serde::{Deserialize, Serialize}; -use serde_with::skip_serializing_none; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum QueryResponse { - /// In a foreach query we respond with multiple result sets, one for each foreach predicate. - /// This variant uses a struct constructor to reflect the API JSON format. - ForEach { rows: Vec }, - /// In a non-foreach query we respond with a single result set. - /// This variant uses a tuple constructor to reflect the lack of a wrapping object in the API - /// JSON format. 
- Single(RowSet), -} - -impl QueryResponse { - pub fn new() -> QueryResponse { - QueryResponse::Single(Default::default()) - } -} - -impl Default for QueryResponse { - fn default() -> Self { - Self::new() - } -} - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ForEachRow { - pub query: RowSet, -} - -#[skip_serializing_none] -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -pub struct RowSet { - /// The results of the aggregates returned by the query - pub aggregates: Option>, - /// The rows returned by the query, corresponding to the query's fields - pub rows: Option>>, -} - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum ResponseFieldValue { - Relationship(Box), - Column(serde_json::Value), -} diff --git a/crates/dc-api-types/src/raw_request.rs b/crates/dc-api-types/src/raw_request.rs deleted file mode 100644 index ff1d39a6..00000000 --- a/crates/dc-api-types/src/raw_request.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct RawRequest { - /// A string representing a raw query - #[serde(rename = "query")] - pub query: String, -} - -impl RawRequest { - pub fn new(query: String) -> RawRequest { - RawRequest { query } - } -} diff --git a/crates/dc-api-types/src/raw_response.rs b/crates/dc-api-types/src/raw_response.rs deleted file mode 100644 index 7c876e7b..00000000 --- a/crates/dc-api-types/src/raw_response.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: 
https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct RawResponse { - /// The rows returned by the raw query. - #[serde(rename = "rows")] - pub rows: Vec< - ::std::collections::HashMap>, - >, -} - -impl RawResponse { - pub fn new( - rows: Vec< - ::std::collections::HashMap< - String, - ::std::collections::HashMap, - >, - >, - ) -> RawResponse { - RawResponse { rows } - } -} diff --git a/crates/dc-api-types/src/related_table.rs b/crates/dc-api-types/src/related_table.rs deleted file mode 100644 index b8938cbd..00000000 --- a/crates/dc-api-types/src/related_table.rs +++ /dev/null @@ -1,41 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct RelatedTable { - #[serde(rename = "relationship")] - pub relationship: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl RelatedTable { - pub fn new(relationship: String, r#type: RHashType) -> RelatedTable { - RelatedTable { - relationship, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "related")] - Related, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Related - } -} diff --git a/crates/dc-api-types/src/relationship.rs b/crates/dc-api-types/src/relationship.rs deleted file mode 100644 index f0bb5d11..00000000 --- a/crates/dc-api-types/src/relationship.rs +++ /dev/null @@ -1,156 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * 
Generated by: https://openapi-generator.tech - */ - -use std::{collections::HashMap, fmt}; - -use crate::comparison_column::ColumnSelector; -use crate::target::target_or_table_name; -use serde::{ - de::{self, Visitor}, - Deserialize, Deserializer, Serialize, -}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct Relationship { - /// A mapping between columns on the source table to columns on the target table - #[serde(rename = "column_mapping")] - pub column_mapping: ColumnMapping, - - #[serde(rename = "relationship_type")] - pub relationship_type: crate::RelationshipType, - - /// The target of the relationship. - /// For backwards compatibility with previous versions of dc-api we allow the alternative property name "target_table" and allow table names to be parsed into Target::TTable - #[serde( - rename = "target", - alias = "target_table", - deserialize_with = "target_or_table_name" - )] - pub target: crate::Target, -} - -#[derive(Clone, Debug, PartialEq)] -pub struct ColumnMapping(pub HashMap); - -impl Serialize for ColumnMapping { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - if self.0.keys().all(|k| k.is_column()) { - return self.0.serialize(serializer); - } - self.0.iter().collect::>().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for ColumnMapping { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct ColumnMappingVisitor; - - impl<'de> Visitor<'de> for ColumnMappingVisitor { - type Value = ColumnMapping; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("Column mapping object or array") - } - - fn visit_map(self, map: A) -> Result - where - A: de::MapAccess<'de>, - { - let m: HashMap = - Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))?; - Ok(ColumnMapping( - m.into_iter() - .map(|(k, v)| (ColumnSelector::new(k), v)) - .collect(), - )) - } - - fn visit_seq(self, seq: A) -> Result - 
where - A: de::SeqAccess<'de>, - { - let s: Vec<(ColumnSelector, ColumnSelector)> = - Deserialize::deserialize(de::value::SeqAccessDeserializer::new(seq))?; - Ok(ColumnMapping(s.into_iter().collect())) - } - } - deserializer.deserialize_any(ColumnMappingVisitor) - } -} - -impl Relationship { - pub fn new( - column_mapping: ColumnMapping, - relationship_type: crate::RelationshipType, - target: crate::Target, - ) -> Relationship { - Relationship { - column_mapping, - relationship_type, - target, - } - } -} - -#[cfg(test)] -mod test { - use std::collections::HashMap; - - use mongodb::bson::{bson, from_bson, to_bson}; - use nonempty::nonempty; - - use crate::comparison_column::ColumnSelector; - - use super::ColumnMapping; - - #[test] - fn serialize_column_mapping() -> Result<(), anyhow::Error> { - let input = ColumnMapping(HashMap::from_iter(vec![( - ColumnSelector::new("k".to_owned()), - ColumnSelector::new("v".to_owned()), - )])); - assert_eq!(to_bson(&input)?, bson!({"k": "v"})); - - let input = ColumnMapping(HashMap::from_iter(vec![( - ColumnSelector::Path(nonempty!["k".to_owned(), "j".to_owned()]), - ColumnSelector::new("v".to_owned()), - )])); - assert_eq!(to_bson(&input)?, bson!([[["k", "j"], "v"]])); - Ok(()) - } - - #[test] - fn parse_column_mapping() -> Result<(), anyhow::Error> { - let input = bson!({"k": "v"}); - assert_eq!( - from_bson::(input)?, - ColumnMapping(HashMap::from_iter(vec![( - ColumnSelector::new("k".to_owned()), - ColumnSelector::new("v".to_owned()) - )])) - ); - - let input = bson!([[["k", "j"], "v"]]); - assert_eq!( - from_bson::(input)?, - ColumnMapping(HashMap::from_iter(vec![( - ColumnSelector::Path(nonempty!["k".to_owned(), "j".to_owned()]), - ColumnSelector::new("v".to_owned()) - )])) - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/relationship_field.rs b/crates/dc-api-types/src/relationship_field.rs deleted file mode 100644 index 2d54fa48..00000000 --- a/crates/dc-api-types/src/relationship_field.rs +++ /dev/null @@ -1,45 
+0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct RelationshipField { - #[serde(rename = "query")] - pub query: Box, - /// The name of the relationship to follow for the subquery - #[serde(rename = "relationship")] - pub relationship: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl RelationshipField { - pub fn new(query: crate::Query, relationship: String, r#type: RHashType) -> RelationshipField { - RelationshipField { - query: Box::new(query), - relationship, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "relationship")] - Relationship, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Relationship - } -} diff --git a/crates/dc-api-types/src/relationship_type.rs b/crates/dc-api-types/src/relationship_type.rs deleted file mode 100644 index c4b45352..00000000 --- a/crates/dc-api-types/src/relationship_type.rs +++ /dev/null @@ -1,35 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -/// -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RelationshipType { - #[serde(rename = "object")] - Object, - #[serde(rename = "array")] - Array, -} - -impl ToString for RelationshipType { - fn to_string(&self) -> String { - match self { - Self::Object => String::from("object"), - Self::Array => String::from("array"), - } - } -} - -impl Default for 
RelationshipType { - fn default() -> RelationshipType { - Self::Object - } -} diff --git a/crates/dc-api-types/src/row_object_value.rs b/crates/dc-api-types/src/row_object_value.rs deleted file mode 100644 index 02c81504..00000000 --- a/crates/dc-api-types/src/row_object_value.rs +++ /dev/null @@ -1,20 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct RowObjectValue {} - -impl RowObjectValue { - pub fn new() -> RowObjectValue { - RowObjectValue {} - } -} diff --git a/crates/dc-api-types/src/row_update.rs b/crates/dc-api-types/src/row_update.rs deleted file mode 100644 index 5912174f..00000000 --- a/crates/dc-api-types/src/row_update.rs +++ /dev/null @@ -1,53 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum RowUpdate { - #[serde(rename = "custom_operator")] - CustomUpdateColumnOperatorRowUpdate { - /// The name of the column in the row - #[serde(rename = "column")] - column: String, - #[serde(rename = "operator_name")] - operator_name: String, - /// The value to use with the column operator - #[serde(rename = "value")] - value: ::std::collections::HashMap, - #[serde(rename = "value_type")] - value_type: String, - }, - #[serde(rename = "set")] - SetColumnRowUpdate { - /// The name of the column in the row - #[serde(rename = "column")] - column: String, - /// The value to use with the column operator - #[serde(rename = "value")] - value: 
::std::collections::HashMap, - #[serde(rename = "value_type")] - value_type: String, - }, -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "set")] - Set, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Set - } -} diff --git a/crates/dc-api-types/src/scalar_type_capabilities.rs b/crates/dc-api-types/src/scalar_type_capabilities.rs deleted file mode 100644 index 489d2068..00000000 --- a/crates/dc-api-types/src/scalar_type_capabilities.rs +++ /dev/null @@ -1,49 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -/// ScalarTypeCapabilities : Capabilities of a scalar type. comparison_operators: The comparison operators supported by the scalar type. aggregate_functions: The aggregate functions supported by the scalar type. update_column_operators: The update column operators supported by the scalar type. graphql_type: Associates the custom scalar type with one of the built-in GraphQL scalar types. If a `graphql_type` is specified then HGE will use the parser for that built-in type when parsing values of the custom type. If not given then any JSON value will be accepted. -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ScalarTypeCapabilities { - /// A map from aggregate function names to their result types. Function and result type names must be valid GraphQL names. Result type names must be defined scalar types declared in ScalarTypesCapabilities. - #[serde( - rename = "aggregate_functions", - skip_serializing_if = "Option::is_none" - )] - pub aggregate_functions: Option<::std::collections::HashMap>, - /// A map from comparison operator names to their argument types. 
Operator and argument type names must be valid GraphQL names. Argument type names must be defined scalar types declared in ScalarTypesCapabilities. - #[serde( - rename = "comparison_operators", - skip_serializing_if = "Option::is_none" - )] - pub comparison_operators: Option<::std::collections::HashMap>, - #[serde(rename = "graphql_type", skip_serializing_if = "Option::is_none")] - pub graphql_type: Option, - /// A map from update column operator names to their definitions. Operator names must be valid GraphQL names. - #[serde( - rename = "update_column_operators", - skip_serializing_if = "Option::is_none" - )] - pub update_column_operators: - Option<::std::collections::HashMap>, -} - -impl ScalarTypeCapabilities { - /// Capabilities of a scalar type. comparison_operators: The comparison operators supported by the scalar type. aggregate_functions: The aggregate functions supported by the scalar type. update_column_operators: The update column operators supported by the scalar type. graphql_type: Associates the custom scalar type with one of the built-in GraphQL scalar types. If a `graphql_type` is specified then HGE will use the parser for that built-in type when parsing values of the custom type. If not given then any JSON value will be accepted. 
- pub fn new() -> ScalarTypeCapabilities { - ScalarTypeCapabilities { - aggregate_functions: None, - comparison_operators: None, - graphql_type: None, - update_column_operators: None, - } - } -} diff --git a/crates/dc-api-types/src/scalar_value.rs b/crates/dc-api-types/src/scalar_value.rs deleted file mode 100644 index 5211fd25..00000000 --- a/crates/dc-api-types/src/scalar_value.rs +++ /dev/null @@ -1,58 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct ScalarValue { - #[serde(rename = "value")] - pub value: serde_json::Value, - #[serde(rename = "value_type")] - pub value_type: String, -} - -impl ScalarValue { - pub fn new(value: serde_json::Value, value_type: String) -> ScalarValue { - ScalarValue { value, value_type } - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use super::ScalarValue; - - #[test] - fn serialize_scalar_value() -> Result<(), anyhow::Error> { - let input = ScalarValue { - value: serde_json::json!("One"), - value_type: "string".to_owned(), - }; - assert_eq!( - to_bson(&input)?, - bson!({"value": "One", "value_type": "string"}) - ); - Ok(()) - } - - #[test] - fn parses_scalar_value() -> Result<(), anyhow::Error> { - let input = bson!({"value": "One", "value_type": "string"}); - assert_eq!( - from_bson::(input)?, - ScalarValue { - value: serde_json::json!("One"), - value_type: "string".to_owned(), - } - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/schema_response.rs b/crates/dc-api-types/src/schema_response.rs deleted file mode 100644 index a4b94cee..00000000 --- a/crates/dc-api-types/src/schema_response.rs +++ /dev/null @@ -1,30 +0,0 @@ -/* - * - * - * No description provided (generated by 
Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct SchemaResponse { - /// Object type definitions referenced in this schema - #[serde(rename = "objectTypes", skip_serializing_if = "Vec::is_empty", default)] - pub object_types: Vec, - /// Available tables - #[serde(rename = "tables")] - pub tables: Vec, -} - -impl SchemaResponse { - pub fn new(tables: Vec) -> SchemaResponse { - SchemaResponse { - object_types: vec![], - tables, - } - } -} diff --git a/crates/dc-api-types/src/set_column_row_update.rs b/crates/dc-api-types/src/set_column_row_update.rs deleted file mode 100644 index 09b3d9e6..00000000 --- a/crates/dc-api-types/src/set_column_row_update.rs +++ /dev/null @@ -1,54 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct SetColumnRowUpdate { - /// The name of the column in the row - #[serde(rename = "column")] - pub column: String, - #[serde(rename = "type")] - pub r#type: RHashType, - /// The value to use with the column operator - #[serde(rename = "value")] - pub value: ::std::collections::HashMap, - #[serde(rename = "value_type")] - pub value_type: String, -} - -impl SetColumnRowUpdate { - pub fn new( - column: String, - r#type: RHashType, - value: ::std::collections::HashMap, - value_type: String, - ) -> SetColumnRowUpdate { - SetColumnRowUpdate { - column, - r#type, - value, - value_type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType 
{ - #[serde(rename = "set")] - Set, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Set - } -} diff --git a/crates/dc-api-types/src/single_column_aggregate.rs b/crates/dc-api-types/src/single_column_aggregate.rs deleted file mode 100644 index e0789acb..00000000 --- a/crates/dc-api-types/src/single_column_aggregate.rs +++ /dev/null @@ -1,54 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct SingleColumnAggregate { - /// The column to apply the aggregation function to - #[serde(rename = "column")] - pub column: String, - /// Single column aggregate function name. A valid GraphQL name - #[serde(rename = "function")] - pub function: String, - #[serde(rename = "result_type")] - pub result_type: String, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl SingleColumnAggregate { - pub fn new( - column: String, - function: String, - result_type: String, - r#type: RHashType, - ) -> SingleColumnAggregate { - SingleColumnAggregate { - column, - function, - result_type, - r#type, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "single_column")] - SingleColumn, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::SingleColumn - } -} diff --git a/crates/dc-api-types/src/star_count_aggregate.rs b/crates/dc-api-types/src/star_count_aggregate.rs deleted file mode 100644 index 00f6d03f..00000000 --- a/crates/dc-api-types/src/star_count_aggregate.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of 
the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct StarCountAggregate { - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl StarCountAggregate { - pub fn new(r#type: RHashType) -> StarCountAggregate { - StarCountAggregate { r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "star_count")] - StarCount, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::StarCount - } -} diff --git a/crates/dc-api-types/src/subquery_comparison_capabilities.rs b/crates/dc-api-types/src/subquery_comparison_capabilities.rs deleted file mode 100644 index b33d5d8a..00000000 --- a/crates/dc-api-types/src/subquery_comparison_capabilities.rs +++ /dev/null @@ -1,26 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct SubqueryComparisonCapabilities { - /// Does the agent support comparisons that involve related tables (ie. joins)? 
- #[serde(rename = "supports_relations", skip_serializing_if = "Option::is_none")] - pub supports_relations: Option, -} - -impl SubqueryComparisonCapabilities { - pub fn new() -> SubqueryComparisonCapabilities { - SubqueryComparisonCapabilities { - supports_relations: None, - } - } -} diff --git a/crates/dc-api-types/src/table_info.rs b/crates/dc-api-types/src/table_info.rs deleted file mode 100644 index fb16780a..00000000 --- a/crates/dc-api-types/src/table_info.rs +++ /dev/null @@ -1,62 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct TableInfo { - /// The columns of the table - #[serde(rename = "columns")] - pub columns: Vec, - /// Whether or not existing rows can be deleted in the table - #[serde(rename = "deletable", skip_serializing_if = "Option::is_none")] - pub deletable: Option, - /// Description of the table - #[serde( - rename = "description", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub description: Option>, - /// Foreign key constraints - #[serde(rename = "foreign_keys", skip_serializing_if = "Option::is_none")] - pub foreign_keys: Option<::std::collections::HashMap>, - /// Whether or not new rows can be inserted into the table - #[serde(rename = "insertable", skip_serializing_if = "Option::is_none")] - pub insertable: Option, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "name")] - pub name: Vec, - /// The primary key of the table - #[serde(rename = "primary_key", skip_serializing_if = "Option::is_none")] - pub primary_key: Option>, - #[serde(rename = 
"type", skip_serializing_if = "Option::is_none")] - pub r#type: Option, - /// Whether or not existing rows can be updated in the table - #[serde(rename = "updatable", skip_serializing_if = "Option::is_none")] - pub updatable: Option, -} - -impl TableInfo { - pub fn new(columns: Vec, name: Vec) -> TableInfo { - TableInfo { - columns, - deletable: None, - description: None, - foreign_keys: None, - insertable: None, - name, - primary_key: None, - r#type: None, - updatable: None, - } - } -} diff --git a/crates/dc-api-types/src/table_insert_schema.rs b/crates/dc-api-types/src/table_insert_schema.rs deleted file mode 100644 index a155b931..00000000 --- a/crates/dc-api-types/src/table_insert_schema.rs +++ /dev/null @@ -1,42 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct TableInsertSchema { - /// The fields that will be found in the insert row data for the table and the schema for each field - #[serde(rename = "fields")] - pub fields: ::std::collections::HashMap, - /// The names of the columns that make up the table's primary key - #[serde( - rename = "primary_key", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub primary_key: Option>>, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - pub table: Vec, -} - -impl TableInsertSchema { - pub fn new( - fields: ::std::collections::HashMap, - table: Vec, - ) -> TableInsertSchema { - TableInsertSchema { - fields, - primary_key: None, - table, - } - } -} diff --git a/crates/dc-api-types/src/table_relationships.rs 
b/crates/dc-api-types/src/table_relationships.rs deleted file mode 100644 index 123b76ec..00000000 --- a/crates/dc-api-types/src/table_relationships.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct TableRelationships { - /// A map of relationships from the source table to target tables. The key of the map is the relationship name - #[serde(rename = "relationships")] - pub relationships: ::std::collections::HashMap, - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "source_table")] - pub source_table: Vec, -} - -impl TableRelationships { - pub fn new( - relationships: ::std::collections::HashMap, - source_table: Vec, - ) -> TableRelationships { - TableRelationships { - relationships, - source_table, - } - } -} diff --git a/crates/dc-api-types/src/table_type.rs b/crates/dc-api-types/src/table_type.rs deleted file mode 100644 index 9c7d635b..00000000 --- a/crates/dc-api-types/src/table_type.rs +++ /dev/null @@ -1,35 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -/// -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum TableType { - #[serde(rename = "table")] - Table, - #[serde(rename = "view")] - View, -} - -impl ToString for TableType { - fn to_string(&self) -> String { - match self { - Self::Table => String::from("table"), - Self::View 
=> String::from("view"), - } - } -} - -impl Default for TableType { - fn default() -> TableType { - Self::Table - } -} diff --git a/crates/dc-api-types/src/target.rs b/crates/dc-api-types/src/target.rs deleted file mode 100644 index 3888ae22..00000000 --- a/crates/dc-api-types/src/target.rs +++ /dev/null @@ -1,90 +0,0 @@ -use serde::de::{self, MapAccess, Visitor}; -use serde::{Deserialize, Deserializer, Serialize}; -use std::collections::HashMap; -use std::fmt; - -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum Target { - #[serde(rename = "table")] - TTable { - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "name")] - name: Vec, - - /// This field is not part of the v2 DC Agent API - it is included to support queries - /// translated from the v3 NDC API. These arguments correspond to `arguments` fields on the - /// v3 `QueryRequest` and `Relationship` types. - #[serde(skip, default)] - arguments: HashMap, - }, // TODO: variants TInterpolated and TFunction should be immplemented if/when we add support for (interpolated) native queries and functions -} - -impl Target { - pub fn name(&self) -> &Vec { - match self { - Target::TTable { name, .. } => name, - } - } - - pub fn arguments(&self) -> &HashMap { - match self { - Target::TTable { arguments, .. } => arguments, - } - } -} - -// Allow a table name (represented as a Vec) to be deserialized into a Target::TTable. -// This provides backwards compatibility with previous version of DC API. 
-pub fn target_or_table_name<'de, D>(deserializer: D) -> Result -where - D: Deserializer<'de>, -{ - struct TargetOrTableName; - - impl<'de> Visitor<'de> for TargetOrTableName { - type Value = Target; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("Target or TableName") - } - - fn visit_seq(self, seq: A) -> Result - where - A: de::SeqAccess<'de>, - { - let name = Deserialize::deserialize(de::value::SeqAccessDeserializer::new(seq))?; - Ok(Target::TTable { - name, - arguments: Default::default(), - }) - } - - fn visit_map(self, map: M) -> Result - where - M: MapAccess<'de>, - { - Deserialize::deserialize(de::value::MapAccessDeserializer::new(map)) - } - } - - deserializer.deserialize_any(TargetOrTableName) -} - -/// Optional arguments to the target of a query request or a relationship. This is a v3 feature -/// which corresponds to the `Argument` and `RelationshipArgument` ndc-client types. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum Argument { - /// The argument is provided by reference to a variable - Variable { - name: String, - }, - /// The argument is provided as a literal value - Literal { - value: serde_json::Value, - }, - // The argument is provided based on a column of the source collection - Column { - name: String, - }, -} diff --git a/crates/dc-api-types/src/unary_comparison_operator.rs b/crates/dc-api-types/src/unary_comparison_operator.rs deleted file mode 100644 index f727a026..00000000 --- a/crates/dc-api-types/src/unary_comparison_operator.rs +++ /dev/null @@ -1,86 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{de, Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Deserialize)] -#[serde(untagged)] -pub enum UnaryComparisonOperator { - #[serde(deserialize_with = "parse_is_null")] - IsNull, 
- CustomUnaryComparisonOperator(String), -} - -impl Serialize for UnaryComparisonOperator { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - UnaryComparisonOperator::IsNull => serializer.serialize_str("is_null"), - UnaryComparisonOperator::CustomUnaryComparisonOperator(s) => { - serializer.serialize_str(s) - } - } - } -} - -fn parse_is_null<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: de::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - if s == "is_null" { - Ok(()) - } else { - Err(de::Error::custom("invalid value")) - } -} - -#[cfg(test)] -mod test { - use mongodb::bson::{bson, from_bson, to_bson}; - - use super::UnaryComparisonOperator; - - #[test] - fn serialize_is_null() -> Result<(), anyhow::Error> { - let input = UnaryComparisonOperator::IsNull; - assert_eq!(to_bson(&input)?, bson!("is_null")); - Ok(()) - } - - #[test] - fn serialize_custom_unary_comparison_operator() -> Result<(), anyhow::Error> { - let input = UnaryComparisonOperator::CustomUnaryComparisonOperator("square".to_owned()); - assert_eq!(to_bson(&input)?, bson!("square")); - Ok(()) - } - - #[test] - fn parses_is_null() -> Result<(), anyhow::Error> { - let input = bson!("is_null"); - assert_eq!( - from_bson::(input)?, - UnaryComparisonOperator::IsNull - ); - Ok(()) - } - - #[test] - fn parses_custom_operator() -> Result<(), anyhow::Error> { - let input = bson!("square"); - assert_eq!( - from_bson::(input)?, - UnaryComparisonOperator::CustomUnaryComparisonOperator("square".to_owned()) - ); - Ok(()) - } -} diff --git a/crates/dc-api-types/src/unique_identifier_generation_strategy.rs b/crates/dc-api-types/src/unique_identifier_generation_strategy.rs deleted file mode 100644 index 17d6176f..00000000 --- a/crates/dc-api-types/src/unique_identifier_generation_strategy.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator 
https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct UniqueIdentifierGenerationStrategy { - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl UniqueIdentifierGenerationStrategy { - pub fn new(r#type: RHashType) -> UniqueIdentifierGenerationStrategy { - UniqueIdentifierGenerationStrategy { r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "unique_identifier")] - UniqueIdentifier, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::UniqueIdentifier - } -} diff --git a/crates/dc-api-types/src/unrelated_table.rs b/crates/dc-api-types/src/unrelated_table.rs deleted file mode 100644 index 8b7b871d..00000000 --- a/crates/dc-api-types/src/unrelated_table.rs +++ /dev/null @@ -1,39 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct UnrelatedTable { - /// The fully qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - pub table: Vec, - #[serde(rename = "type")] - pub r#type: RHashType, -} - -impl UnrelatedTable { - pub fn new(table: Vec, r#type: RHashType) -> UnrelatedTable { - UnrelatedTable { table, r#type } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "unrelated")] - Unrelated, -} - -impl Default 
for RHashType { - fn default() -> RHashType { - Self::Unrelated - } -} diff --git a/crates/dc-api-types/src/update_column_operator_definition.rs b/crates/dc-api-types/src/update_column_operator_definition.rs deleted file mode 100644 index 8e978543..00000000 --- a/crates/dc-api-types/src/update_column_operator_definition.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct UpdateColumnOperatorDefinition { - #[serde(rename = "argument_type")] - pub argument_type: String, -} - -impl UpdateColumnOperatorDefinition { - pub fn new(argument_type: String) -> UpdateColumnOperatorDefinition { - UpdateColumnOperatorDefinition { argument_type } - } -} diff --git a/crates/dc-api-types/src/update_mutation_operation.rs b/crates/dc-api-types/src/update_mutation_operation.rs deleted file mode 100644 index 850c97a0..00000000 --- a/crates/dc-api-types/src/update_mutation_operation.rs +++ /dev/null @@ -1,65 +0,0 @@ -/* - * - * - * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - * - * The version of the OpenAPI document: - * - * Generated by: https://openapi-generator.tech - */ - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] -pub struct UpdateMutationOperation { - #[serde(rename = "post_update_check", skip_serializing_if = "Option::is_none")] - pub post_update_check: Option>, - /// The fields to return for the rows affected by this update operation - #[serde( - rename = "returning_fields", - default, - with = "::serde_with::rust::double_option", - skip_serializing_if = "Option::is_none" - )] - pub returning_fields: Option>>, - /// The fully 
qualified name of a table, where the last item in the array is the table name and any earlier items represent the namespacing of the table name - #[serde(rename = "table")] - pub table: Vec, - #[serde(rename = "type")] - pub r#type: RHashType, - /// The updates to make to the matched rows in the table - #[serde(rename = "updates")] - pub updates: Vec, - #[serde(rename = "where", skip_serializing_if = "Option::is_none")] - pub r#where: Option>, -} - -impl UpdateMutationOperation { - pub fn new( - table: Vec, - r#type: RHashType, - updates: Vec, - ) -> UpdateMutationOperation { - UpdateMutationOperation { - post_update_check: None, - returning_fields: None, - table, - r#type, - updates, - r#where: None, - } - } -} - -/// -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RHashType { - #[serde(rename = "update")] - Update, -} - -impl Default for RHashType { - fn default() -> RHashType { - Self::Update - } -} diff --git a/crates/dc-api/Cargo.toml b/crates/dc-api/Cargo.toml deleted file mode 100644 index 762f9573..00000000 --- a/crates/dc-api/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "dc-api" -version = "0.1.0" -edition = "2021" - -[dependencies] -axum = { version = "0.6.18", features = ["headers"] } -bytes = "^1" -dc-api-types = { path = "../dc-api-types" } -http = "^0.2" -jsonwebtoken = "8" -mime = "^0.3" -serde = { version = "1.0", features = ["derive"] } -serde_json = { version = "1.0", features = ["preserve_order"] } -thiserror = "1.0.40" -tracing = "0.1.37" - -[dev-dependencies] -axum-test-helper = "0.3.0" -tokio = "1" diff --git a/crates/dc-api/src/interface_types/agent_error.rs b/crates/dc-api/src/interface_types/agent_error.rs deleted file mode 100644 index fb39ab73..00000000 --- a/crates/dc-api/src/interface_types/agent_error.rs +++ /dev/null @@ -1,88 +0,0 @@ -use std::fmt; - -use axum::{ - extract::rejection::{JsonRejection, TypedHeaderRejection}, - http::StatusCode, - 
response::IntoResponse, - Json, -}; -use thiserror::Error; - -use dc_api_types::ErrorResponse; - -/// Type for all errors that might occur as a result of requests sent to the agent. -#[derive(Debug, Error)] -pub enum AgentError { - BadHeader(#[from] TypedHeaderRejection), - BadJWT(#[from] jsonwebtoken::errors::Error), - BadJWTNoKID, - BadJSONRequestBody(#[from] JsonRejection), - /// Default case for deserialization failures *not including* parsing request bodies. - Deserialization(#[from] serde_json::Error), - InvalidLicenseKey, - NotFound(axum::http::Uri), -} - -use AgentError::*; - -impl AgentError { - pub fn status_and_error_response(&self) -> (StatusCode, ErrorResponse) { - match self { - BadHeader(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(&err)), - BadJWT(err) => ( - StatusCode::UNAUTHORIZED, - ErrorResponse { - message: "Could not decode JWT".to_owned(), - details: Some( - [( - "error".to_owned(), - serde_json::Value::String(err.to_string()), - )] - .into(), - ), - r#type: None, - }, - ), - BadJWTNoKID => ( - StatusCode::UNAUTHORIZED, - ErrorResponse::new("License Token doesn't have a `kid` header field"), - ), - BadJSONRequestBody(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(&err)), - Deserialization(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(&err)), - InvalidLicenseKey => ( - StatusCode::UNAUTHORIZED, - ErrorResponse::new("Invalid License Key"), - ), - NotFound(uri) => ( - StatusCode::NOT_FOUND, - ErrorResponse::new(&format!("No Route {uri}")), - ), - } - } -} - -impl fmt::Display for AgentError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let (_, err) = self.status_and_error_response(); - write!(f, "{}", err.message) - } -} - -impl IntoResponse for AgentError { - fn into_response(self) -> axum::response::Response { - if cfg!(debug_assertions) { - // Log certain errors in development only. The `debug_assertions` feature is present in - // debug builds, which we use during development. 
It is not present in release builds. - match &self { - BadHeader(err) => tracing::warn!(error = %err, "error reading rquest header"), - BadJSONRequestBody(err) => { - tracing::warn!(error = %err, "error parsing request body") - } - InvalidLicenseKey => tracing::warn!("invalid license key"), - _ => (), - } - } - let (status, resp) = self.status_and_error_response(); - (status, Json(resp)).into_response() - } -} diff --git a/crates/dc-api/src/interface_types/mod.rs b/crates/dc-api/src/interface_types/mod.rs deleted file mode 100644 index e584429c..00000000 --- a/crates/dc-api/src/interface_types/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod agent_error; - -pub use self::agent_error::AgentError; diff --git a/crates/dc-api/src/lib.rs b/crates/dc-api/src/lib.rs deleted file mode 100644 index 6b182571..00000000 --- a/crates/dc-api/src/lib.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod interface_types; - -pub use self::interface_types::AgentError; diff --git a/crates/integration-tests/src/tests/local_relationship.rs b/crates/integration-tests/src/tests/local_relationship.rs index 842d83e5..83c818a1 100644 --- a/crates/integration-tests/src/tests/local_relationship.rs +++ b/crates/integration-tests/src/tests/local_relationship.rs @@ -44,3 +44,25 @@ async fn joins_local_relationships() -> anyhow::Result<()> { Ok(()) } +// TODO: Tests an upcoming change in MBD-14 +#[ignore] +#[tokio::test] +async fn filters_by_field_of_related_collection() -> anyhow::Result<()> { + assert_yaml_snapshot!( + graphql_query( + r#" + query { + comments(limit: 10, where: {movie: {title: {_is_null: false}}}) { + movie { + title + } + } + } + "# + ) + .variables(json!({ "limit": 11, "movies_limit": 2 })) + .run() + .await? 
+ ); + Ok(()) +} diff --git a/crates/integration-tests/src/tests/remote_relationship.rs b/crates/integration-tests/src/tests/remote_relationship.rs index 9864f860..c5558d2e 100644 --- a/crates/integration-tests/src/tests/remote_relationship.rs +++ b/crates/integration-tests/src/tests/remote_relationship.rs @@ -1,6 +1,6 @@ use crate::{graphql_query, run_connector_query}; use insta::assert_yaml_snapshot; -use ndc_test_helpers::{equal, field, query, query_request, target, variable}; +use ndc_test_helpers::{binop, field, query, query_request, target, variable}; use serde_json::json; #[tokio::test] @@ -33,10 +33,10 @@ async fn handles_request_with_single_variable_set() -> anyhow::Result<()> { run_connector_query( query_request() .collection("movies") - .variables([vec![("id", json!("573a1390f29313caabcd50e5"))]]) + .variables([[("id", json!("573a1390f29313caabcd50e5"))]]) .query( query() - .predicate(equal(target!("_id"), variable!(id))) + .predicate(binop("_eq", target!("_id"), variable!(id))) .fields([field!("title")]), ), ) diff --git a/crates/mongodb-agent-common/Cargo.toml b/crates/mongodb-agent-common/Cargo.toml index 80871a40..941bfd7e 100644 --- a/crates/mongodb-agent-common/Cargo.toml +++ b/crates/mongodb-agent-common/Cargo.toml @@ -6,9 +6,8 @@ edition = "2021" [dependencies] configuration = { path = "../configuration" } -dc-api = { path = "../dc-api" } -dc-api-types = { path = "../dc-api-types" } mongodb-support = { path = "../mongodb-support" } +ndc-query-plan = { path = "../ndc-query-plan" } anyhow = "1.0.71" async-trait = "^0.1" @@ -18,10 +17,12 @@ enum-iterator = "^2.0.0" futures = "0.3.28" futures-util = "0.3.28" http = "^0.2" -indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses +indexmap = { workspace = true } indent = "^0.1" itertools = { workspace = true } +lazy_static = "^1.4.0" mongodb = { workspace = true } +ndc-models = { workspace = true } once_cell = "1" regex = "1" schemars = { version = "^0.8.12", 
features = ["smol_str"] } @@ -33,8 +34,8 @@ time = { version = "0.3.29", features = ["formatting", "parsing", "serde"] } tracing = "0.1" [dev-dependencies] -dc-api-test-helpers = { path = "../dc-api-test-helpers" } mongodb-cli-plugin = { path = "../cli" } +ndc-test-helpers = { path = "../ndc-test-helpers" } test-helpers = { path = "../test-helpers" } mockall = "^0.12.1" diff --git a/crates/mongodb-agent-common/src/aggregation_function.rs b/crates/mongodb-agent-common/src/aggregation_function.rs index bdd3492d..c22fdc0e 100644 --- a/crates/mongodb-agent-common/src/aggregation_function.rs +++ b/crates/mongodb-agent-common/src/aggregation_function.rs @@ -10,10 +10,9 @@ pub enum AggregationFunction { Sum, } +use ndc_query_plan::QueryPlanError; use AggregationFunction as A; -use crate::interface_types::MongoAgentError; - impl AggregationFunction { pub fn graphql_name(self) -> &'static str { match self { @@ -25,9 +24,11 @@ impl AggregationFunction { } } - pub fn from_graphql_name(s: &str) -> Result { + pub fn from_graphql_name(s: &str) -> Result { all::() .find(|variant| variant.graphql_name() == s) - .ok_or(MongoAgentError::UnknownAggregationFunction(s.to_owned())) + .ok_or(QueryPlanError::UnknownAggregateFunction { + aggregate_function: s.to_owned(), + }) } } diff --git a/crates/mongodb-agent-common/src/comparison_function.rs b/crates/mongodb-agent-common/src/comparison_function.rs index 6ca57cf6..0c049b05 100644 --- a/crates/mongodb-agent-common/src/comparison_function.rs +++ b/crates/mongodb-agent-common/src/comparison_function.rs @@ -1,4 +1,3 @@ -use dc_api_types::BinaryComparisonOperator; use enum_iterator::{all, Sequence}; use mongodb::bson::{doc, Bson, Document}; @@ -22,11 +21,9 @@ pub enum ComparisonFunction { IRegex, } -use BinaryComparisonOperator as B; +use ndc_query_plan::QueryPlanError; use ComparisonFunction as C; -use crate::interface_types::MongoAgentError; - impl ComparisonFunction { pub fn graphql_name(self) -> &'static str { match self { @@ -54,10 
+51,10 @@ impl ComparisonFunction { } } - pub fn from_graphql_name(s: &str) -> Result { + pub fn from_graphql_name(s: &str) -> Result { all::() .find(|variant| variant.graphql_name() == s) - .ok_or(MongoAgentError::UnknownAggregationFunction(s.to_owned())) + .ok_or(QueryPlanError::UnknownComparisonOperator(s.to_owned())) } /// Produce a MongoDB expression that applies this function to the given operands. @@ -70,18 +67,3 @@ impl ComparisonFunction { } } } - -impl TryFrom<&BinaryComparisonOperator> for ComparisonFunction { - type Error = MongoAgentError; - - fn try_from(operator: &BinaryComparisonOperator) -> Result { - match operator { - B::LessThan => Ok(C::LessThan), - B::LessThanOrEqual => Ok(C::LessThanOrEqual), - B::GreaterThan => Ok(C::GreaterThan), - B::GreaterThanOrEqual => Ok(C::GreaterThanOrEqual), - B::Equal => Ok(C::Equal), - B::CustomBinaryComparisonOperator(op) => ComparisonFunction::from_graphql_name(op), - } - } -} diff --git a/crates/mongodb-agent-common/src/explain.rs b/crates/mongodb-agent-common/src/explain.rs index cad0d898..738b3a73 100644 --- a/crates/mongodb-agent-common/src/explain.rs +++ b/crates/mongodb-agent-common/src/explain.rs @@ -1,27 +1,28 @@ -use configuration::Configuration; -use dc_api_types::{ExplainResponse, QueryRequest}; +use std::collections::BTreeMap; + use mongodb::bson::{doc, to_bson, Bson}; +use ndc_models::{ExplainResponse, QueryRequest}; +use ndc_query_plan::plan_for_query_request; use crate::{ interface_types::MongoAgentError, + mongo_query_plan::MongoConfiguration, query::{self, QueryTarget}, state::ConnectorState, }; pub async fn explain_query( - config: &Configuration, + config: &MongoConfiguration, state: &ConnectorState, query_request: QueryRequest, ) -> Result { - tracing::debug!(query_request = %serde_json::to_string(&query_request).unwrap()); - let db = state.database(); + let query_plan = plan_for_query_request(config, query_request)?; - let pipeline = query::pipeline_for_query_request(config, 
&query_request)?; + let pipeline = query::pipeline_for_query_request(config, &query_plan)?; let pipeline_bson = to_bson(&pipeline)?; - let aggregate_target = match QueryTarget::for_request(config, &query_request).input_collection() - { + let aggregate_target = match QueryTarget::for_request(config, &query_plan).input_collection() { Some(collection_name) => Bson::String(collection_name.to_owned()), None => Bson::Int32(1), }; @@ -41,17 +42,13 @@ pub async fn explain_query( let explain_result = db.run_command(explain_command, None).await?; - let explanation = serde_json::to_string_pretty(&explain_result) - .map_err(MongoAgentError::Serialization)? - .lines() - .map(String::from) - .collect(); + let plan = + serde_json::to_string_pretty(&explain_result).map_err(MongoAgentError::Serialization)?; let query = serde_json::to_string_pretty(&query_command).map_err(MongoAgentError::Serialization)?; Ok(ExplainResponse { - lines: explanation, - query, + details: BTreeMap::from_iter([("plan".to_owned(), plan), ("query".to_owned(), query)]), }) } diff --git a/crates/mongodb-agent-common/src/interface_types/mongo_agent_error.rs b/crates/mongodb-agent-common/src/interface_types/mongo_agent_error.rs index 376fbfac..b725e129 100644 --- a/crates/mongodb-agent-common/src/interface_types/mongo_agent_error.rs +++ b/crates/mongodb-agent-common/src/interface_types/mongo_agent_error.rs @@ -1,12 +1,11 @@ use std::fmt::{self, Display}; -use axum::{response::IntoResponse, Json}; -use dc_api_types::ErrorResponse; use http::StatusCode; use mongodb::bson; +use ndc_query_plan::QueryPlanError; use thiserror::Error; -use crate::mutation::MutationError; +use crate::{procedure::ProcedureError, query::QueryResponseError}; /// A superset of the DC-API `AgentError` type. This enum adds error cases specific to the MongoDB /// agent. 
@@ -21,13 +20,14 @@ pub enum MongoAgentError { MongoDBSerialization(#[from] mongodb::bson::ser::Error), MongoDBSupport(#[from] mongodb_support::error::Error), NotImplemented(&'static str), - MutationError(#[from] MutationError), + Procedure(#[from] ProcedureError), + QueryPlan(#[from] QueryPlanError), + ResponseSerialization(#[from] QueryResponseError), Serialization(serde_json::Error), UnknownAggregationFunction(String), UnspecifiedRelation(String), VariableNotDefined(String), AdHoc(#[from] anyhow::Error), - AgentError(#[from] dc_api::AgentError), } use MongoAgentError::*; @@ -76,7 +76,9 @@ impl MongoAgentError { } MongoDBSupport(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(&err)), NotImplemented(missing_feature) => (StatusCode::BAD_REQUEST, ErrorResponse::new(&format!("The MongoDB agent does not yet support {missing_feature}"))), - MutationError(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(err)), + Procedure(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(err)), + QueryPlan(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(err)), + ResponseSerialization(err) => (StatusCode::BAD_REQUEST, ErrorResponse::new(err)), Serialization(err) => (StatusCode::INTERNAL_SERVER_ERROR, ErrorResponse::new(&err)), UnknownAggregationFunction(function) => ( StatusCode::BAD_REQUEST, @@ -91,7 +93,6 @@ impl MongoAgentError { ErrorResponse::new(&format!("Query referenced a variable, \"{variable_name}\", but it is not defined by the query request")) ), AdHoc(err) => (StatusCode::INTERNAL_SERVER_ERROR, ErrorResponse::new(&err)), - AgentError(err) => err.status_and_error_response(), } } } @@ -103,20 +104,47 @@ impl Display for MongoAgentError { } } -impl IntoResponse for MongoAgentError { - fn into_response(self) -> axum::response::Response { - if cfg!(debug_assertions) { - // Log certain errors in development only. The `debug_assertions` feature is present in - // debug builds, which we use during development. It is not present in release builds. 
- #[allow(clippy::single_match)] - match &self { - BadCollectionSchema(collection_name, collection_validator, err) => { - tracing::warn!(collection_name, ?collection_validator, error = %err, "error parsing collection validator") - } - _ => (), +#[derive(Clone, Debug, PartialEq, Default)] +pub struct ErrorResponse { + pub details: Option<::std::collections::HashMap>, + pub message: String, + pub r#type: Option, +} + +impl ErrorResponse { + pub fn new(message: &T) -> ErrorResponse + where + T: Display + ?Sized, + { + ErrorResponse { + details: None, + message: format!("{message}"), + r#type: None, + } + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum ErrorResponseType { + UncaughtError, + MutationConstraintViolation, + MutationPermissionCheckFailure, +} + +impl ToString for ErrorResponseType { + fn to_string(&self) -> String { + match self { + Self::UncaughtError => String::from("uncaught-error"), + Self::MutationConstraintViolation => String::from("mutation-constraint-violation"), + Self::MutationPermissionCheckFailure => { + String::from("mutation-permission-check-failure") } } - let (status, resp) = self.status_and_error_response(); - (status, Json(resp)).into_response() + } +} + +impl Default for ErrorResponseType { + fn default() -> ErrorResponseType { + Self::UncaughtError } } diff --git a/crates/mongodb-agent-common/src/lib.rs b/crates/mongodb-agent-common/src/lib.rs index a57214ca..4fcd6596 100644 --- a/crates/mongodb-agent-common/src/lib.rs +++ b/crates/mongodb-agent-common/src/lib.rs @@ -3,10 +3,14 @@ pub mod comparison_function; pub mod explain; pub mod health; pub mod interface_types; +pub mod mongo_query_plan; pub mod mongodb; pub mod mongodb_connection; -pub mod mutation; +pub mod procedure; pub mod query; pub mod scalar_types_capabilities; pub mod schema; pub mod state; + +#[cfg(test)] +mod test_helpers; diff --git a/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs 
b/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs new file mode 100644 index 00000000..6fdc4e8f --- /dev/null +++ b/crates/mongodb-agent-common/src/mongo_query_plan/mod.rs @@ -0,0 +1,112 @@ +use std::collections::BTreeMap; + +use configuration::{ + native_mutation::NativeMutation, native_query::NativeQuery, Configuration, MongoScalarType, +}; +use mongodb_support::EXTENDED_JSON_TYPE_NAME; +use ndc_models as ndc; +use ndc_query_plan::{ConnectorTypes, QueryContext, QueryPlanError}; + +use crate::aggregation_function::AggregationFunction; +use crate::comparison_function::ComparisonFunction; +use crate::scalar_types_capabilities::SCALAR_TYPES; + +pub use ndc_query_plan::OrderByTarget; + +#[derive(Clone, Debug)] +pub struct MongoConfiguration(pub Configuration); + +impl MongoConfiguration { + pub fn native_queries(&self) -> &BTreeMap { + &self.0.native_queries + } + + pub fn native_mutations(&self) -> &BTreeMap { + &self.0.native_mutations + } +} + +impl ConnectorTypes for MongoConfiguration { + type AggregateFunction = AggregationFunction; + type ComparisonOperator = ComparisonFunction; + type ScalarType = MongoScalarType; +} + +impl QueryContext for MongoConfiguration { + fn lookup_scalar_type(type_name: &str) -> Option { + type_name.try_into().ok() + } + + fn lookup_aggregation_function( + &self, + input_type: &Type, + function_name: &str, + ) -> Result<(Self::AggregateFunction, &ndc::AggregateFunctionDefinition), QueryPlanError> { + let function = AggregationFunction::from_graphql_name(function_name)?; + let definition = scalar_type_name(input_type) + .and_then(|name| SCALAR_TYPES.get(name)) + .and_then(|scalar_type_def| scalar_type_def.aggregate_functions.get(function_name)) + .ok_or_else(|| QueryPlanError::UnknownAggregateFunction { + aggregate_function: function_name.to_owned(), + })?; + Ok((function, definition)) + } + + fn lookup_comparison_operator( + &self, + left_operand_type: &Type, + operator_name: &str, + ) -> Result<(Self::ComparisonOperator, 
&ndc::ComparisonOperatorDefinition), QueryPlanError> + where + Self: Sized, + { + let operator = ComparisonFunction::from_graphql_name(operator_name)?; + let definition = scalar_type_name(left_operand_type) + .and_then(|name| SCALAR_TYPES.get(name)) + .and_then(|scalar_type_def| scalar_type_def.comparison_operators.get(operator_name)) + .ok_or_else(|| QueryPlanError::UnknownComparisonOperator(operator_name.to_owned()))?; + Ok((operator, definition)) + } + + fn collections(&self) -> &BTreeMap { + &self.0.collections + } + + fn functions(&self) -> &BTreeMap { + &self.0.functions + } + + fn object_types(&self) -> &BTreeMap { + &self.0.object_types + } + + fn procedures(&self) -> &BTreeMap { + &self.0.procedures + } +} + +fn scalar_type_name(t: &Type) -> Option<&'static str> { + match t { + Type::Scalar(MongoScalarType::Bson(s)) => Some(s.graphql_name()), + Type::Scalar(MongoScalarType::ExtendedJSON) => Some(EXTENDED_JSON_TYPE_NAME), + Type::Nullable(t) => scalar_type_name(t), + _ => None, + } +} + +pub type Aggregate = ndc_query_plan::Aggregate; +pub type ComparisonTarget = ndc_query_plan::ComparisonTarget; +pub type ComparisonValue = ndc_query_plan::ComparisonValue; +pub type ExistsInCollection = ndc_query_plan::ExistsInCollection; +pub type Expression = ndc_query_plan::Expression; +pub type Field = ndc_query_plan::Field; +pub type NestedField = ndc_query_plan::NestedField; +pub type NestedArray = ndc_query_plan::NestedArray; +pub type NestedObject = ndc_query_plan::NestedObject; +pub type ObjectType = ndc_query_plan::ObjectType; +pub type OrderBy = ndc_query_plan::OrderBy; +pub type Query = ndc_query_plan::Query; +pub type QueryPlan = ndc_query_plan::QueryPlan; +pub type Relationship = ndc_query_plan::Relationship; +pub type Relationships = ndc_query_plan::Relationships; +pub type Type = ndc_query_plan::Type; diff --git a/crates/mongodb-agent-common/src/mongodb/sanitize.rs b/crates/mongodb-agent-common/src/mongodb/sanitize.rs index 2afe2c61..0ef537a2 100644 --- 
a/crates/mongodb-agent-common/src/mongodb/sanitize.rs +++ b/crates/mongodb-agent-common/src/mongodb/sanitize.rs @@ -1,7 +1,6 @@ use std::borrow::Cow; use anyhow::anyhow; -use dc_api_types::comparison_column::ColumnSelector; use mongodb::bson::{doc, Document}; use once_cell::sync::Lazy; use regex::Regex; @@ -45,14 +44,3 @@ pub fn safe_name(name: &str) -> Result, MongoAgentError> { Ok(Cow::Borrowed(name)) } } - -pub fn safe_column_selector(column_selector: &ColumnSelector) -> Result, MongoAgentError> { - match column_selector { - ColumnSelector::Path(p) => p - .iter() - .map(|s| safe_name(s)) - .collect::>, MongoAgentError>>() - .map(|v| Cow::Owned(v.join("."))), - ColumnSelector::Column(c) => safe_name(c), - } -} diff --git a/crates/mongodb-agent-common/src/mongodb/selection.rs b/crates/mongodb-agent-common/src/mongodb/selection.rs index db99df03..2e031d2a 100644 --- a/crates/mongodb-agent-common/src/mongodb/selection.rs +++ b/crates/mongodb-agent-common/src/mongodb/selection.rs @@ -1,11 +1,11 @@ -use std::collections::HashMap; - -use dc_api_types::{query_request::QueryRequest, Field, TableRelationships}; -use mongodb::bson::{self, bson, doc, Bson, Document}; +use indexmap::IndexMap; +use mongodb::bson::{self, doc, Bson, Document}; use serde::{Deserialize, Serialize}; use crate::{ - interface_types::MongoAgentError, mongodb::sanitize::get_field, query::is_response_faceted, + interface_types::MongoAgentError, + mongo_query_plan::{Field, NestedArray, NestedField, NestedObject, QueryPlan}, + mongodb::sanitize::get_field, }; /// Wraps a BSON document that represents a MongoDB "expression" that constructs a document based @@ -15,8 +15,6 @@ use crate::{ /// When we compose pipelines, we can pair each Pipeline with a Selection that extracts the data we /// want, in the format we want it to provide to HGE. We can collect Selection values and merge /// them to form one stage after all of the composed pipelines. -/// -/// TODO: Do we need a deep/recursive merge for this type? 
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] #[serde(transparent)] pub struct Selection(pub bson::Document); @@ -26,109 +24,91 @@ impl Selection { Selection(doc) } - pub fn from_query_request(query_request: &QueryRequest) -> Result { + pub fn from_query_request(query_request: &QueryPlan) -> Result { // let fields = (&query_request.query.fields).flatten().unwrap_or_default(); - let empty_map = HashMap::new(); + let empty_map = IndexMap::new(); let fields = if let Some(fs) = &query_request.query.fields { fs } else { &empty_map }; - let doc = from_query_request_helper(&query_request.relationships, &[], fields)?; + let doc = from_query_request_helper(&[], fields)?; Ok(Selection(doc)) } } fn from_query_request_helper( - table_relationships: &[TableRelationships], parent_columns: &[&str], - field_selection: &HashMap, + field_selection: &IndexMap, ) -> Result { field_selection .iter() - .map(|(key, value)| { - Ok(( - key.into(), - selection_for_field(table_relationships, parent_columns, key, value)?, - )) - }) + .map(|(key, value)| Ok((key.into(), selection_for_field(parent_columns, value)?))) .collect() } -/// If column_type is date we want to format it as a string. -/// TODO: do we want to format any other BSON types in any particular way, -/// e.g. formated ObjectId as string? -/// /// Wraps column reference with an `$isNull` check. That catches cases where a field is missing /// from a document, and substitutes a concrete null value. Otherwise the field would be omitted /// from query results which leads to an error in the engine. -pub fn serialized_null_checked_column_reference(col_path: String, column_type: &str) -> Bson { - let col_path = doc! { "$ifNull": [col_path, Bson::Null] }; - match column_type { - // Don't worry, $dateToString will returns `null` if `col_path` is null - "date" => bson!({"$dateToString": {"date": col_path}}), - _ => bson!(col_path), - } +fn value_or_null(col_path: String) -> Bson { + doc! 
{ "$ifNull": [col_path, Bson::Null] }.into() } -fn selection_for_field( - table_relationships: &[TableRelationships], - parent_columns: &[&str], - field_name: &str, - field: &Field, -) -> Result { +fn selection_for_field(parent_columns: &[&str], field: &Field) -> Result { match field { Field::Column { column, - column_type, + fields: None, + .. } => { let col_path = match parent_columns { [] => format!("${column}"), _ => format!("${}.{}", parent_columns.join("."), column), }; - let bson_col_path = serialized_null_checked_column_reference(col_path, column_type); + let bson_col_path = value_or_null(col_path); Ok(bson_col_path) } - Field::NestedObject { column, query } => { + Field::Column { + column, + fields: Some(NestedField::Object(NestedObject { fields })), + .. + } => { let nested_parent_columns = append_to_path(parent_columns, column); let nested_parent_col_path = format!("${}", nested_parent_columns.join(".")); - let fields = query.fields.clone().unwrap_or_default(); - let nested_selection = - from_query_request_helper(table_relationships, &nested_parent_columns, &fields)?; + let nested_selection = from_query_request_helper(&nested_parent_columns, fields)?; Ok(doc! {"$cond": {"if": nested_parent_col_path, "then": nested_selection, "else": Bson::Null}}.into()) } - Field::NestedArray { - field, - // NOTE: We can use a $slice in our selection to do offsets and limits: - // https://www.mongodb.com/docs/manual/reference/operator/projection/slice/#mongodb-projection-proj.-slice - limit: _, - offset: _, - r#where: _, - } => selection_for_array(table_relationships, parent_columns, field_name, field, 0), - Field::Relationship { query, .. } => { - if is_response_faceted(query) { - Ok(doc! { "$first": get_field(field_name) }.into()) + Field::Column { + column, + fields: + Some(NestedField::Array(NestedArray { + fields: nested_field, + })), + .. 
+ } => selection_for_array(&append_to_path(parent_columns, column), nested_field, 0), + Field::Relationship { + relationship, + aggregates, + .. + } => { + if aggregates.is_some() { + Ok(doc! { "$first": get_field(relationship) }.into()) } else { - Ok(doc! { "rows": get_field(field_name) }.into()) + Ok(doc! { "rows": get_field(relationship) }.into()) } } } } fn selection_for_array( - table_relationships: &[TableRelationships], parent_columns: &[&str], - field_name: &str, - field: &Field, + field: &NestedField, array_nesting_level: usize, ) -> Result { match field { - Field::NestedObject { column, query } => { - let nested_parent_columns = append_to_path(parent_columns, column); - let nested_parent_col_path = format!("${}", nested_parent_columns.join(".")); - let fields = query.fields.clone().unwrap_or_default(); - let mut nested_selection = - from_query_request_helper(table_relationships, &["$this"], &fields)?; + NestedField::Object(NestedObject { fields }) => { + let nested_parent_col_path = format!("${}", parent_columns.join(".")); + let mut nested_selection = from_query_request_helper(&["$this"], fields)?; for _ in 0..array_nesting_level { nested_selection = doc! {"$map": {"input": "$$this", "in": nested_selection}} } @@ -136,21 +116,9 @@ fn selection_for_array( doc! {"$map": {"input": &nested_parent_col_path, "in": nested_selection}}; Ok(doc! 
{"$cond": {"if": &nested_parent_col_path, "then": map_expression, "else": Bson::Null}}.into()) } - Field::NestedArray { - field, - // NOTE: We can use a $slice in our selection to do offsets and limits: - // https://www.mongodb.com/docs/manual/reference/operator/projection/slice/#mongodb-projection-proj.-slice - limit: _, - offset: _, - r#where: _, - } => selection_for_array( - table_relationships, - parent_columns, - field_name, - field, - array_nesting_level + 1, - ), - _ => selection_for_field(table_relationships, parent_columns, field_name, field), + NestedField::Array(NestedArray { + fields: nested_field, + }) => selection_for_array(parent_columns, nested_field, array_nesting_level + 1), } } fn append_to_path<'a, 'b, 'c>(parent_columns: &'a [&'b str], column: &'c str) -> Vec<&'c str> @@ -183,85 +151,46 @@ impl TryFrom for Selection { #[cfg(test)] mod tests { - use std::collections::HashMap; - + use configuration::Configuration; use mongodb::bson::{doc, Document}; + use ndc_query_plan::plan_for_query_request; + use ndc_test_helpers::{ + array, array_of, collection, field, named_type, nullable, object, object_type, query, + query_request, relation_field, relationship, + }; use pretty_assertions::assert_eq; - use serde_json::{from_value, json}; + + use crate::mongo_query_plan::MongoConfiguration; use super::Selection; - use dc_api_types::{Field, Query, QueryRequest, Target}; #[test] fn calculates_selection_for_query_request() -> Result<(), anyhow::Error> { - let fields: HashMap = from_value(json!({ - "foo": { "type": "column", "column": "foo", "column_type": "String" }, - "foo_again": { "type": "column", "column": "foo", "column_type": "String" }, - "bar": { - "type": "object", - "column": "bar", - "query": { - "fields": { - "baz": { "type": "column", "column": "baz", "column_type": "String" }, - "baz_again": { "type": "column", "column": "baz", "column_type": "String" }, - }, - }, - }, - "bar_again": { - "type": "object", - "column": "bar", - "query": { - 
"fields": { - "baz": { "type": "column", "column": "baz", "column_type": "String" }, - }, - }, - }, - "my_date": { "type": "column", "column": "my_date", "column_type": "date"}, - "array_of_scalars": {"type": "array", "field": { "type": "column", "column": "foo", "column_type": "String"}}, - "array_of_objects": { - "type": "array", - "field": { - "type": "object", - "column": "foo", - "query": { - "fields": { - "baz": {"type": "column", "column": "baz", "column_type": "String"} - } - } - } - }, - "array_of_arrays_of_objects": { - "type": "array", - "field": { - "type": "array", - "field": { - "type": "object", - "column": "foo", - "query": { - "fields": { - "baz": {"type": "column", "column": "baz", "column_type": "String"} - } - } - } - } - } - }))?; + let query_request = query_request() + .collection("test") + .query(query().fields([ + field!("foo"), + field!("foo_again" => "foo"), + field!("bar" => "bar", object!([ + field!("baz"), + field!("baz_again" => "baz"), + ])), + field!("bar_again" => "bar", object!([ + field!("baz"), + ])), + field!("array_of_scalars" => "xs"), + field!("array_of_objects" => "os", array!(object!([ + field!("cat") + ]))), + field!("array_of_arrays_of_objects" => "oss", array!(array!(object!([ + field!("cat") + ])))), + ])) + .into(); - let query_request = QueryRequest { - query: Box::new(Query { - fields: Some(fields), - ..Default::default() - }), - foreach: None, - variables: None, - target: Target::TTable { - name: vec!["test".to_owned()], - arguments: Default::default(), - }, - relationships: vec![], - }; + let query_plan = plan_for_query_request(&foo_config(), query_request)?; - let selection = Selection::from_query_request(&query_request)?; + let selection = Selection::from_query_request(&query_plan)?; assert_eq!( Into::::into(selection), doc! 
{ @@ -286,19 +215,14 @@ mod tests { "else": null } }, - "my_date": { - "$dateToString": { - "date": { "$ifNull": ["$my_date", null] } - } - }, - "array_of_scalars": { "$ifNull": ["$foo", null] }, + "array_of_scalars": { "$ifNull": ["$xs", null] }, "array_of_objects": { "$cond": { - "if": "$foo", + "if": "$os", "then": { "$map": { - "input": "$foo", - "in": {"baz": { "$ifNull": ["$$this.baz", null] }} + "input": "$os", + "in": {"cat": { "$ifNull": ["$$this.cat", null] }} } }, "else": null @@ -306,14 +230,14 @@ mod tests { }, "array_of_arrays_of_objects": { "$cond": { - "if": "$foo", + "if": "$oss", "then": { "$map": { - "input": "$foo", + "input": "$oss", "in": { "$map": { "input": "$$this", - "in": {"baz": { "$ifNull": ["$$this.baz", null] }} + "in": {"cat": { "$ifNull": ["$$this.cat", null] }} } } } @@ -328,42 +252,25 @@ mod tests { #[test] fn produces_selection_for_relation() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "class_students": { - "type": "relationship", - "query": { - "fields": { - "name": { "type": "column", "column": "name", "column_type": "string" }, - }, - }, - "relationship": "class_students", - }, - "students": { - "type": "relationship", - "query": { - "fields": { - "student_name": { "type": "column", "column": "name", "column_type": "string" }, - }, - }, - "relationship": "class_students", - }, - }, - }, - "target": {"name": ["classes"], "type": "table"}, - "relationships": [{ - "source_table": ["classes"], - "relationships": { - "class_students": { - "column_mapping": { "_id": "classId" }, - "relationship_type": "array", - "target": {"name": ["students"], "type": "table"}, - }, - }, - }], - }))?; - let selection = Selection::from_query_request(&query_request)?; + let query_request = query_request() + .collection("classes") + .query(query().fields([ + relation_field!("class_students" => "class_students", query().fields([ + field!("name") + ])), + relation_field!("students" 
=> "class_students", query().fields([ + field!("student_name" => "name") + ])), + ])) + .relationships([( + "class_students", + relationship("students", [("_id", "classId")]), + )]) + .into(); + + let query_plan = plan_for_query_request(&students_config(), query_request)?; + + let selection = Selection::from_query_request(&query_plan)?; assert_eq!( Into::::into(selection), doc! { @@ -374,7 +281,7 @@ mod tests { }, "students": { "rows": { - "$getField": { "$literal": "students" } + "$getField": { "$literal": "class_students" } }, }, } @@ -382,60 +289,78 @@ mod tests { Ok(()) } - // Same test as above, but using the old query format to test for backwards compatibility - #[test] - fn produces_selection_for_relation_compat() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "class_students": { - "type": "relationship", - "query": { - "fields": { - "name": { "type": "column", "column": "name", "column_type": "string" }, - }, - }, - "relationship": "class_students", - }, - "students": { - "type": "relationship", - "query": { - "fields": { - "student_name": { "type": "column", "column": "name", "column_type": "string" }, - }, - }, - "relationship": "class_students", - }, - }, - }, - "table": ["classes"], - "table_relationships": [{ - "source_table": ["classes"], - "relationships": { - "class_students": { - "column_mapping": { "_id": "classId" }, - "relationship_type": "array", - "target_table": ["students"], - }, - }, - }], - }))?; - let selection = Selection::from_query_request(&query_request)?; - assert_eq!( - Into::::into(selection), - doc! 
{ - "class_students": { - "rows": { - "$getField": { "$literal": "class_students" } - }, - }, - "students": { - "rows": { - "$getField": { "$literal": "students" } - }, - }, - } - ); - Ok(()) + fn students_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: [collection("classes"), collection("students")].into(), + object_types: [ + ( + "assignments".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("student_id", named_type("ObjectId")), + ("title", named_type("String")), + ]), + ), + ( + "classes".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("title", named_type("String")), + ("year", named_type("Int")), + ]), + ), + ( + "students".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("classId", named_type("ObjectId")), + ("gpa", named_type("Double")), + ("name", named_type("String")), + ("year", named_type("Int")), + ]), + ), + ] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) + } + + fn foo_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: [collection("test")].into(), + object_types: [ + ( + "test".into(), + object_type([ + ("foo", nullable(named_type("String"))), + ("bar", nullable(named_type("bar"))), + ("xs", nullable(array_of(nullable(named_type("Int"))))), + ("os", nullable(array_of(nullable(named_type("os"))))), + ( + "oss", + nullable(array_of(nullable(array_of(nullable(named_type("os")))))), + ), + ]), + ), + ( + "bar".into(), + object_type([("baz", nullable(named_type("String")))]), + ), + ( + "os".into(), + object_type([("cat", nullable(named_type("String")))]), + ), + ] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) } } diff --git 
a/crates/mongodb-agent-common/src/mongodb/stage.rs b/crates/mongodb-agent-common/src/mongodb/stage.rs index 4be51550..addb6fe3 100644 --- a/crates/mongodb-agent-common/src/mongodb/stage.rs +++ b/crates/mongodb-agent-common/src/mongodb/stage.rs @@ -37,7 +37,7 @@ pub enum Stage { /// /// See https://www.mongodb.com/docs/manual/reference/operator/aggregation/limit/#mongodb-pipeline-pipe.-limit #[serde(rename = "$limit")] - Limit(i64), + Limit(u32), /// Performs a left outer join to another collection in the same database to filter in /// documents from the "joined" collection for processing. @@ -95,7 +95,7 @@ pub enum Stage { /// /// See https://www.mongodb.com/docs/manual/reference/operator/aggregation/skip/#mongodb-pipeline-pipe.-skip #[serde(rename = "$skip")] - Skip(u64), + Skip(u32), /// Groups input documents by a specified identifier expression and applies the accumulator /// expression(s), if specified, to each group. Consumes all input documents and outputs one diff --git a/crates/mongodb-agent-common/src/mutation/error.rs b/crates/mongodb-agent-common/src/procedure/error.rs similarity index 96% rename from crates/mongodb-agent-common/src/mutation/error.rs rename to crates/mongodb-agent-common/src/procedure/error.rs index e2e363bf..45a5ba56 100644 --- a/crates/mongodb-agent-common/src/mutation/error.rs +++ b/crates/mongodb-agent-common/src/procedure/error.rs @@ -4,7 +4,7 @@ use thiserror::Error; use crate::query::arguments::ArgumentError; #[derive(Debug, Error)] -pub enum MutationError { +pub enum ProcedureError { #[error("error executing mongodb command: {0}")] ExecutionError(#[from] mongodb::error::Error), diff --git a/crates/mongodb-agent-common/src/mutation/interpolated_command.rs b/crates/mongodb-agent-common/src/procedure/interpolated_command.rs similarity index 72% rename from crates/mongodb-agent-common/src/mutation/interpolated_command.rs rename to crates/mongodb-agent-common/src/procedure/interpolated_command.rs index e90c9c89..59d8b488 100644 --- 
a/crates/mongodb-agent-common/src/mutation/interpolated_command.rs +++ b/crates/mongodb-agent-common/src/procedure/interpolated_command.rs @@ -3,9 +3,9 @@ use std::collections::BTreeMap; use itertools::Itertools as _; use mongodb::bson::{self, Bson}; -use super::MutationError; +use super::ProcedureError; -type Result = std::result::Result; +type Result = std::result::Result; /// Parse native mutation commands, and interpolate arguments. pub fn interpolated_command( @@ -48,7 +48,7 @@ fn interpolate_document( let interpolated_key = interpolate_string(&key, arguments)?; match interpolated_key { Bson::String(string_key) => Ok((string_key, interpolated_value)), - _ => Err(MutationError::NonStringKey(interpolated_key)), + _ => Err(ProcedureError::NonStringKey(interpolated_key)), } }) .try_collect() @@ -85,7 +85,7 @@ fn interpolate_string(string: &str, arguments: &BTreeMap) -> Resul let argument_value = resolve_argument(¶m, arguments)?; match argument_value { Bson::String(string) => Ok(string), - _ => Err(MutationError::NonStringInStringContext(param)), + _ => Err(ProcedureError::NonStringInStringContext(param)), } } }) @@ -97,7 +97,7 @@ fn interpolate_string(string: &str, arguments: &BTreeMap) -> Resul fn resolve_argument(argument_name: &str, arguments: &BTreeMap) -> Result { let argument = arguments .get(argument_name) - .ok_or_else(|| MutationError::MissingArgument(argument_name.to_owned()))?; + .ok_or_else(|| ProcedureError::MissingArgument(argument_name.to_owned()))?; Ok(argument.clone()) } @@ -110,7 +110,7 @@ enum NativeMutationPart { Parameter(String), } -/// Parse a string or key in a native mutation into parts where variables have the syntax +/// Parse a string or key in a native procedure into parts where variables have the syntax /// `{{}}`. 
fn parse_native_mutation(string: &str) -> Vec { let vec: Vec> = string @@ -135,40 +135,31 @@ fn parse_native_mutation(string: &str) -> Vec { #[cfg(test)] mod tests { - use configuration::{ - native_mutation::NativeMutation, - schema::{ObjectField, ObjectType, Type}, - }; + use configuration::{native_mutation::NativeMutation, MongoScalarType}; use mongodb::bson::doc; use mongodb_support::BsonScalarType as S; use pretty_assertions::assert_eq; use serde_json::json; - use crate::query::arguments::resolve_arguments; + use crate::{ + mongo_query_plan::{ObjectType, Type}, + query::arguments::resolve_arguments, + }; use super::*; - // TODO: key - // TODO: key with multiple placeholders - #[test] fn interpolates_non_string_type() -> anyhow::Result<()> { let native_mutation = NativeMutation { - result_type: Type::Object("InsertArtist".to_owned()), + result_type: Type::Object(ObjectType { + name: Some("InsertArtist".into()), + fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + }), arguments: [ - ( - "id".to_owned(), - ObjectField { - r#type: Type::Scalar(S::Int), - description: Default::default(), - }, - ), + ("id".to_owned(), Type::Scalar(MongoScalarType::Bson(S::Int))), ( "name".to_owned(), - ObjectField { - r#type: Type::Scalar(S::String), - description: Default::default(), - }, + Type::Scalar(MongoScalarType::Bson(S::String)), ), ] .into(), @@ -190,11 +181,7 @@ mod tests { .into_iter() .collect(); - let arguments = resolve_arguments( - &Default::default(), - &native_mutation.arguments, - input_arguments, - )?; + let arguments = resolve_arguments(&native_mutation.arguments, input_arguments)?; let command = interpolated_command(&native_mutation.command, &arguments)?; assert_eq!( @@ -213,13 +200,26 @@ mod tests { #[test] fn interpolates_array_argument() -> anyhow::Result<()> { let native_mutation = NativeMutation { - result_type: Type::Object("InsertArtist".to_owned()), + result_type: Type::Object(ObjectType { + name: Some("InsertArtist".into()), 
+ fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + }), arguments: [( "documents".to_owned(), - ObjectField { - r#type: Type::ArrayOf(Box::new(Type::Object("ArtistInput".to_owned()))), - description: Default::default(), - }, + Type::ArrayOf(Box::new(Type::Object(ObjectType { + name: Some("ArtistInput".into()), + fields: [ + ( + "ArtistId".into(), + Type::Scalar(MongoScalarType::Bson(S::Int)), + ), + ( + "Name".into(), + Type::Scalar(MongoScalarType::Bson(S::String)), + ), + ] + .into(), + }))), )] .into(), command: doc! { @@ -230,31 +230,6 @@ mod tests { description: Default::default(), }; - let object_types = [( - "ArtistInput".to_owned(), - ObjectType { - fields: [ - ( - "ArtistId".to_owned(), - ObjectField { - r#type: Type::Scalar(S::Int), - description: Default::default(), - }, - ), - ( - "Name".to_owned(), - ObjectField { - r#type: Type::Scalar(S::String), - description: Default::default(), - }, - ), - ] - .into(), - description: Default::default(), - }, - )] - .into(); - let input_arguments = [( "documents".to_owned(), json!([ @@ -265,8 +240,7 @@ mod tests { .into_iter() .collect(); - let arguments = - resolve_arguments(&object_types, &native_mutation.arguments, input_arguments)?; + let arguments = resolve_arguments(&native_mutation.arguments, input_arguments)?; let command = interpolated_command(&native_mutation.command, &arguments)?; assert_eq!( @@ -291,21 +265,18 @@ mod tests { #[test] fn interpolates_arguments_within_string() -> anyhow::Result<()> { let native_mutation = NativeMutation { - result_type: Type::Object("Insert".to_owned()), + result_type: Type::Object(ObjectType { + name: Some("Insert".into()), + fields: [("ok".into(), Type::Scalar(MongoScalarType::Bson(S::Bool)))].into(), + }), arguments: [ ( "prefix".to_owned(), - ObjectField { - r#type: Type::Scalar(S::String), - description: Default::default(), - }, + Type::Scalar(MongoScalarType::Bson(S::String)), ), ( "basename".to_owned(), - ObjectField { - r#type: 
Type::Scalar(S::String), - description: Default::default(), - }, + Type::Scalar(MongoScalarType::Bson(S::String)), ), ] .into(), @@ -324,11 +295,7 @@ mod tests { .into_iter() .collect(); - let arguments = resolve_arguments( - &Default::default(), - &native_mutation.arguments, - input_arguments, - )?; + let arguments = resolve_arguments(&native_mutation.arguments, input_arguments)?; let command = interpolated_command(&native_mutation.command, &arguments)?; assert_eq!( diff --git a/crates/mongodb-agent-common/src/mutation/mod.rs b/crates/mongodb-agent-common/src/procedure/mod.rs similarity index 56% rename from crates/mongodb-agent-common/src/mutation/mod.rs rename to crates/mongodb-agent-common/src/procedure/mod.rs index 512e716e..841f670a 100644 --- a/crates/mongodb-agent-common/src/mutation/mod.rs +++ b/crates/mongodb-agent-common/src/procedure/mod.rs @@ -5,74 +5,62 @@ use std::borrow::Cow; use std::collections::BTreeMap; use configuration::native_mutation::NativeMutation; -use configuration::schema::{ObjectField, ObjectType, Type}; use mongodb::options::SelectionCriteria; use mongodb::{bson, Database}; +use crate::mongo_query_plan::Type; use crate::query::arguments::resolve_arguments; -pub use self::error::MutationError; +pub use self::error::ProcedureError; pub use self::interpolated_command::interpolated_command; /// Encapsulates running arbitrary mongodb commands with interpolated arguments #[derive(Clone, Debug)] -pub struct Mutation<'a> { +pub struct Procedure<'a> { arguments: BTreeMap, command: Cow<'a, bson::Document>, - parameters: Cow<'a, BTreeMap>, + parameters: Cow<'a, BTreeMap>, result_type: Type, selection_criteria: Option>, } -impl<'a> Mutation<'a> { +impl<'a> Procedure<'a> { pub fn from_native_mutation( native_mutation: &'a NativeMutation, arguments: BTreeMap, ) -> Self { - Mutation { + Procedure { arguments, command: Cow::Borrowed(&native_mutation.command), parameters: Cow::Borrowed(&native_mutation.arguments), result_type: 
native_mutation.result_type.clone(), - selection_criteria: native_mutation.selection_criteria.as_ref().map(Cow::Borrowed), + selection_criteria: native_mutation + .selection_criteria + .as_ref() + .map(Cow::Borrowed), } } pub async fn execute( self, - object_types: &BTreeMap, database: Database, - ) -> Result<(bson::Document, Type), MutationError> { + ) -> Result<(bson::Document, Type), ProcedureError> { let selection_criteria = self.selection_criteria.map(Cow::into_owned); - let command = interpolate( - object_types, - &self.parameters, - self.arguments, - &self.command, - )?; + let command = interpolate(&self.parameters, self.arguments, &self.command)?; let result = database.run_command(command, selection_criteria).await?; Ok((result, self.result_type)) } - pub fn interpolated_command( - self, - object_types: &BTreeMap, - ) -> Result { - interpolate( - object_types, - &self.parameters, - self.arguments, - &self.command, - ) + pub fn interpolated_command(self) -> Result { + interpolate(&self.parameters, self.arguments, &self.command) } } fn interpolate( - object_types: &BTreeMap, - parameters: &BTreeMap, + parameters: &BTreeMap, arguments: BTreeMap, command: &bson::Document, -) -> Result { - let bson_arguments = resolve_arguments(object_types, parameters, arguments)?; +) -> Result { + let bson_arguments = resolve_arguments(parameters, arguments)?; interpolated_command(command, &bson_arguments) } diff --git a/crates/mongodb-agent-common/src/query/arguments.rs b/crates/mongodb-agent-common/src/query/arguments.rs index 5e5078c0..be1d8066 100644 --- a/crates/mongodb-agent-common/src/query/arguments.rs +++ b/crates/mongodb-agent-common/src/query/arguments.rs @@ -1,12 +1,13 @@ use std::collections::BTreeMap; -use configuration::schema::{ObjectField, ObjectType, Type}; use indent::indent_all_by; use itertools::Itertools as _; use mongodb::bson::Bson; use serde_json::Value; use thiserror::Error; +use crate::mongo_query_plan::Type; + use 
super::serialization::{json_to_bson, JsonToBsonError}; #[derive(Debug, Error)] @@ -24,19 +25,18 @@ pub enum ArgumentError { /// Translate arguments to queries or native queries to BSON according to declared parameter types. /// /// Checks that all arguments have been provided, and that no arguments have been given that do not -/// map to declared paremeters (no excess arguments). +/// map to declared parameters (no excess arguments). pub fn resolve_arguments( - object_types: &BTreeMap, - parameters: &BTreeMap, + parameters: &BTreeMap, mut arguments: BTreeMap, ) -> Result, ArgumentError> { validate_no_excess_arguments(parameters, &arguments)?; let (arguments, missing): (Vec<(String, Value, &Type)>, Vec) = parameters .iter() - .map(|(name, parameter)| { + .map(|(name, parameter_type)| { if let Some((name, argument)) = arguments.remove_entry(name) { - Ok((name, argument, ¶meter.r#type)) + Ok((name, argument, parameter_type)) } else { Err(name.clone()) } @@ -48,12 +48,12 @@ pub fn resolve_arguments( let (resolved, errors): (BTreeMap, BTreeMap) = arguments .into_iter() - .map(|(name, argument, parameter_type)| { - match json_to_bson(parameter_type, object_types, argument) { + .map( + |(name, argument, parameter_type)| match json_to_bson(parameter_type, argument) { Ok(bson) => Ok((name, bson)), Err(err) => Err((name, err)), - } - }) + }, + ) .partition_result(); if !errors.is_empty() { return Err(ArgumentError::Invalid(errors)); @@ -63,7 +63,7 @@ pub fn resolve_arguments( } pub fn validate_no_excess_arguments( - parameters: &BTreeMap, + parameters: &BTreeMap, arguments: &BTreeMap, ) -> Result<(), ArgumentError> { let excess: Vec = arguments diff --git a/crates/mongodb-agent-common/src/query/column_ref.rs b/crates/mongodb-agent-common/src/query/column_ref.rs index 85255bcd..be68f59b 100644 --- a/crates/mongodb-agent-common/src/query/column_ref.rs +++ b/crates/mongodb-agent-common/src/query/column_ref.rs @@ -1,32 +1,52 @@ -use dc_api_types::ComparisonColumn; +use 
std::borrow::Cow; +use std::iter::once; + +use itertools::Either; use crate::{ - interface_types::MongoAgentError, - mongodb::sanitize::{safe_column_selector, safe_name}, + interface_types::MongoAgentError, mongo_query_plan::ComparisonTarget, + mongodb::sanitize::safe_name, }; -/// Given a column, and an optional relationship name returns a MongoDB expression that -/// resolves to the value of the corresponding field, either in the target collection of a query -/// request, or in the related collection. -/// -/// evaluating them as expressions. -pub fn column_ref( - column: &ComparisonColumn, - collection_name: Option<&str>, -) -> Result { - if column.path.as_ref().map(|path| !path.is_empty()).unwrap_or(false) { - return Err(MongoAgentError::NotImplemented("comparisons against root query table columns")) - } +/// Given a column target returns a MongoDB expression that resolves to the value of the +/// corresponding field, either in the target collection of a query request, or in the related +/// collection. +pub fn column_ref(column: &ComparisonTarget) -> Result, MongoAgentError> { + let path = match column { + ComparisonTarget::Column { + name, + field_path, + path, + .. + } => Either::Left( + path.iter() + .chain(once(name)) + .chain(field_path.iter().flatten()) + .map(AsRef::as_ref), + ), + ComparisonTarget::RootCollectionColumn { + name, field_path, .. + } => Either::Right( + once("$$ROOT") + .chain(once(name.as_ref())) + .chain(field_path.iter().flatten().map(AsRef::as_ref)), + ), + }; + safe_selector(path) +} - let reference = if let Some(collection) = collection_name { - // This assumes that a related collection has been brought into scope by a $lookup stage. - format!( - "{}.{}", - safe_name(collection)?, - safe_column_selector(&column.name)? - ) +/// Given an iterable of fields to access, ensures that each field name does not include characters +/// that could be interpereted as a MongoDB expression. 
+fn safe_selector<'a>( + path: impl IntoIterator, +) -> Result, MongoAgentError> { + let mut safe_elements = path + .into_iter() + .map(safe_name) + .collect::>, MongoAgentError>>()?; + if safe_elements.len() == 1 { + Ok(safe_elements.pop().unwrap()) } else { - format!("{}", safe_column_selector(&column.name)?) - }; - Ok(reference) + Ok(Cow::Owned(safe_elements.join("."))) + } } diff --git a/crates/mongodb-agent-common/src/query/execute_query_request.rs b/crates/mongodb-agent-common/src/query/execute_query_request.rs index 43eaff9a..7bbed719 100644 --- a/crates/mongodb-agent-common/src/query/execute_query_request.rs +++ b/crates/mongodb-agent-common/src/query/execute_query_request.rs @@ -1,72 +1,104 @@ -use configuration::Configuration; -use dc_api_types::QueryRequest; use futures::Stream; use futures_util::TryStreamExt as _; use mongodb::bson; -use tracing::Instrument; +use ndc_models::{QueryRequest, QueryResponse}; +use ndc_query_plan::plan_for_query_request; +use tracing::{instrument, Instrument}; -use super::pipeline::pipeline_for_query_request; +use super::{pipeline::pipeline_for_query_request, response::serialize_query_response}; use crate::{ interface_types::MongoAgentError, - mongodb::{CollectionTrait as _, DatabaseTrait}, + mongo_query_plan::{MongoConfiguration, QueryPlan}, + mongodb::{CollectionTrait as _, DatabaseTrait, Pipeline}, query::QueryTarget, }; +type Result = std::result::Result; + /// Execute a query request against the given collection. /// /// The use of `DatabaseTrait` lets us inject a mock implementation of the MongoDB driver for /// testing. 
pub async fn execute_query_request( database: impl DatabaseTrait, - config: &Configuration, + config: &MongoConfiguration, query_request: QueryRequest, -) -> Result, MongoAgentError> { - let target = QueryTarget::for_request(config, &query_request); - let pipeline = tracing::info_span!("Build Query Pipeline").in_scope(|| { - pipeline_for_query_request(config, &query_request) - })?; +) -> Result { + let query_plan = preprocess_query_request(config, query_request)?; + let pipeline = pipeline_for_query_request(config, &query_plan)?; + let documents = execute_query_pipeline(database, config, &query_plan, pipeline).await?; + let response = serialize_query_response(&query_plan, documents)?; + Ok(response) +} + +#[instrument(name = "Pre-process Query Request", skip_all, fields(internal.visibility = "user"))] +fn preprocess_query_request( + config: &MongoConfiguration, + query_request: QueryRequest, +) -> Result { + let query_plan = plan_for_query_request(config, query_request)?; + Ok(query_plan) +} + +#[instrument(name = "Execute Query Pipeline", skip_all, fields(internal.visibility = "user"))] +async fn execute_query_pipeline( + database: impl DatabaseTrait, + config: &MongoConfiguration, + query_plan: &QueryPlan, + pipeline: Pipeline, +) -> Result> { + let target = QueryTarget::for_request(config, query_plan); tracing::debug!( - ?query_request, ?target, pipeline = %serde_json::to_string(&pipeline).unwrap(), "executing query" ); + // The target of a query request might be a collection, or it might be a native query. In the // latter case there is no collection to perform the aggregation against. So instead of sending // the MongoDB API call `db..aggregate` we instead call `db.aggregate`. 
- let documents = async move { - match target.input_collection() { - Some(collection_name) => { - let collection = database.collection(collection_name); - collect_from_cursor( - collection.aggregate(pipeline, None) - .instrument(tracing::info_span!("Process Pipeline", internal.visibility = "user")) - .await? - ) - .await - } - None => collect_from_cursor( - database.aggregate(pipeline, None) - .instrument(tracing::info_span!("Process Pipeline", internal.visibility = "user")) - .await? - ) - .await, + let documents = match target.input_collection() { + Some(collection_name) => { + let collection = database.collection(collection_name); + collect_response_documents( + collection + .aggregate(pipeline, None) + .instrument(tracing::info_span!( + "MongoDB Aggregate Command", + internal.visibility = "user" + )) + .await?, + ) + .await } - } - .instrument(tracing::info_span!("Execute Query Pipeline", internal.visibility = "user")) - .await?; + None => { + collect_response_documents( + database + .aggregate(pipeline, None) + .instrument(tracing::info_span!( + "MongoDB Aggregate Command", + internal.visibility = "user" + )) + .await?, + ) + .await + } + }?; tracing::debug!(response_documents = %serde_json::to_string(&documents).unwrap(), "response from MongoDB"); - Ok(documents) } -async fn collect_from_cursor( - document_cursor: impl Stream>, -) -> Result, MongoAgentError> { +#[instrument(name = "Collect Response Documents", skip_all, fields(internal.visibility = "user"))] +async fn collect_response_documents( + document_cursor: impl Stream>, +) -> Result> { document_cursor .into_stream() .map_err(MongoAgentError::MongoDB) .try_collect::>() - .instrument(tracing::info_span!("Collect Pipeline", internal.visibility = "user")) + .instrument(tracing::info_span!( + "Collect Pipeline", + internal.visibility = "user" + )) .await } diff --git a/crates/mongodb-agent-common/src/query/foreach.rs b/crates/mongodb-agent-common/src/query/foreach.rs index 3541f4f3..26eb9794 100644 --- 
a/crates/mongodb-agent-common/src/query/foreach.rs +++ b/crates/mongodb-agent-common/src/query/foreach.rs @@ -1,14 +1,8 @@ -use std::collections::HashMap; - -use configuration::Configuration; -use dc_api_types::comparison_column::ColumnSelector; -use dc_api_types::{ - BinaryComparisonOperator, ComparisonColumn, ComparisonValue, Expression, QueryRequest, - ScalarValue, VariableSet, -}; use mongodb::bson::{doc, Bson}; +use ndc_query_plan::VariableSet; use super::pipeline::pipeline_for_non_foreach; +use crate::mongo_query_plan::{MongoConfiguration, QueryPlan}; use crate::mongodb::Selection; use crate::{ interface_types::MongoAgentError, @@ -17,66 +11,21 @@ use crate::{ const FACET_FIELD: &str = "__FACET__"; -/// If running a native v2 query we will get `Expression` values. If the query is translated from -/// v3 we will get variable sets instead. -#[derive(Clone, Debug)] -pub enum ForeachVariant { - Predicate(Expression), - VariableSet(VariableSet), -} - -/// If the query request represents a "foreach" query then we will need to run multiple variations -/// of the query represented by added predicates and variable sets. This function returns a vec in -/// that case. If the returned map is `None` then the request is not a "foreach" query. -pub fn foreach_variants(query_request: &QueryRequest) -> Option> { - if let Some(Some(foreach)) = &query_request.foreach { - let expressions = foreach - .iter() - .map(make_expression) - .map(ForeachVariant::Predicate) - .collect(); - Some(expressions) - } else if let Some(variables) = &query_request.variables { - let variable_sets = variables - .iter() - .cloned() - .map(ForeachVariant::VariableSet) - .collect(); - Some(variable_sets) - } else { - None - } -} - /// Produces a complete MongoDB pipeline for a foreach query. 
/// /// For symmetry with [`super::execute_query_request::pipeline_for_query`] and /// [`pipeline_for_non_foreach`] this function returns a pipeline paired with a value that /// indicates whether the response requires post-processing in the agent. pub fn pipeline_for_foreach( - foreach: Vec, - config: &Configuration, - query_request: &QueryRequest, + variable_sets: &[VariableSet], + config: &MongoConfiguration, + query_request: &QueryPlan, ) -> Result { - let pipelines: Vec<(String, Pipeline)> = foreach - .into_iter() + let pipelines: Vec<(String, Pipeline)> = variable_sets + .iter() .enumerate() - .map(|(index, foreach_variant)| { - let (predicate, variables) = match foreach_variant { - ForeachVariant::Predicate(expression) => (Some(expression), None), - ForeachVariant::VariableSet(variables) => (None, Some(variables)), - }; - let mut q = query_request.clone(); - - if let Some(predicate) = predicate { - q.query.r#where = match q.query.r#where { - Some(e_old) => e_old.and(predicate), - None => predicate, - } - .into(); - } - - let pipeline = pipeline_for_non_foreach(config, variables.as_ref(), &q)?; + .map(|(index, variables)| { + let pipeline = pipeline_for_non_foreach(config, Some(variables), query_request)?; Ok((facet_name(index), pipeline)) }) .collect::>()?; @@ -94,85 +43,51 @@ pub fn pipeline_for_foreach( }) } -/// Fold a 'foreach' HashMap into an Expression. 
-fn make_expression(column_values: &HashMap) -> Expression { - let sub_exps: Vec = column_values - .clone() - .into_iter() - .map( - |(column_name, scalar_value)| Expression::ApplyBinaryComparison { - column: ComparisonColumn { - column_type: scalar_value.value_type.clone(), - name: ColumnSelector::new(column_name), - path: None, - }, - operator: BinaryComparisonOperator::Equal, - value: ComparisonValue::ScalarValueComparison { - value: scalar_value.value, - value_type: scalar_value.value_type, - }, - }, - ) - .collect(); - - Expression::And { - expressions: sub_exps, - } -} - fn facet_name(index: usize) -> String { format!("{FACET_FIELD}_{index}") } #[cfg(test)] mod tests { - use dc_api_types::{BinaryComparisonOperator, ComparisonColumn, Field, Query, QueryRequest}; - use mongodb::bson::{bson, doc, Bson}; + use configuration::Configuration; + use mongodb::bson::{bson, Bson}; + use ndc_test_helpers::{ + binop, collection, field, named_type, object_type, query, query_request, query_response, + row_set, star_count_aggregate, target, variable, + }; use pretty_assertions::assert_eq; - use serde_json::{from_value, json}; + use serde_json::json; use crate::{ + mongo_query_plan::MongoConfiguration, mongodb::test_helpers::mock_collection_aggregate_response_for_pipeline, query::execute_query_request::execute_query_request, }; #[tokio::test] - async fn executes_foreach_with_fields() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "albumId": { - "type": "column", - "column": "albumId", - "column_type": "number" - }, - "title": { - "type": "column", - "column": "title", - "column_type": "string" - } - } - }, - "target": {"name": ["tracks"], "type": "table"}, - "relationships": [], - "foreach": [ - { "artistId": {"value": 1, "value_type": "int"} }, - { "artistId": {"value": 2, "value_type": "int"} } - ] - }))?; + async fn executes_query_with_variables_and_fields() -> Result<(), anyhow::Error> { + let 
query_request = query_request() + .collection("tracks") + .query( + query() + .fields([field!("albumId"), field!("title")]) + .predicate(binop("_eq", target!("artistId"), variable!(artistId))), + ) + .variables([[("artistId", json!(1))], [("artistId", json!(2))]]) + .into(); let expected_pipeline = bson!([ { "$facet": { "__FACET___0": [ - { "$match": { "$and": [{ "artistId": {"$eq":1 }}]}}, + { "$match": { "artistId": { "$eq": 1 } } }, { "$replaceWith": { "albumId": { "$ifNull": ["$albumId", null] }, "title": { "$ifNull": ["$title", null] } } }, ], "__FACET___1": [ - { "$match": { "$and": [{ "artistId": {"$eq":2}}]}}, + { "$match": { "artistId": { "$eq": 2 } } }, { "$replaceWith": { "albumId": { "$ifNull": ["$albumId", null] }, "title": { "$ifNull": ["$title", null] } @@ -190,18 +105,19 @@ mod tests { } ]); - let expected_response = vec![doc! { - "row_sets": [ - [ - { "albumId": 1, "title": "For Those About To Rock We Salute You" }, - { "albumId": 4, "title": "Let There Be Rock" }, - ], + let expected_response = query_response() + .row_set_rows([ [ - { "albumId": 2, "title": "Balls to the Wall" }, - { "albumId": 3, "title": "Restless and Wild" }, + ("albumId", json!(1)), + ("title", json!("For Those About To Rock We Salute You")), ], - ] - }]; + [("albumId", json!(4)), ("title", json!("Let There Be Rock"))], + ]) + .row_set_rows([ + [("albumId", json!(2)), ("title", json!("Balls to the Wall"))], + [("albumId", json!(3)), ("title", json!("Restless and Wild"))], + ]) + .build(); let db = mock_collection_aggregate_response_for_pipeline( "tracks", @@ -220,45 +136,30 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &music_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) } #[tokio::test] - async fn executes_foreach_with_aggregates() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - 
"aggregates": { - "count": { "type": "star_count" }, - }, - "fields": { - "albumId": { - "type": "column", - "column": "albumId", - "column_type": "number" - }, - "title": { - "type": "column", - "column": "title", - "column_type": "string" - } - } - }, - "target": {"name": ["tracks"], "type": "table"}, - "relationships": [], - "foreach": [ - { "artistId": {"value": 1, "value_type": "int"} }, - { "artistId": {"value": 2, "value_type": "int"} } - ] - }))?; + async fn executes_query_with_variables_and_aggregates() -> Result<(), anyhow::Error> { + let query_request = query_request() + .collection("tracks") + .query( + query() + .aggregates([star_count_aggregate!("count")]) + .fields([field!("albumId"), field!("title")]) + .predicate(binop("_eq", target!("artistId"), variable!(artistId))), + ) + .variables([[("artistId", 1)], [("artistId", 2)]]) + .into(); let expected_pipeline = bson!([ { "$facet": { "__FACET___0": [ - { "$match": { "$and": [{ "artistId": {"$eq": 1 }}]}}, + { "$match": { "artistId": {"$eq": 1 }}}, { "$facet": { "__ROWS__": [{ "$replaceWith": { "albumId": { "$ifNull": ["$albumId", null] }, @@ -277,7 +178,7 @@ mod tests { } }, ], "__FACET___1": [ - { "$match": { "$and": [{ "artistId": {"$eq": 2 }}]}}, + { "$match": { "artistId": {"$eq": 2 }}}, { "$facet": { "__ROWS__": [{ "$replaceWith": { "albumId": { "$ifNull": ["$albumId", null] }, @@ -307,28 +208,27 @@ mod tests { } ]); - let expected_response = vec![doc! 
{ - "row_sets": [ - { - "aggregates": { - "count": 2, - }, - "rows": [ - { "albumId": 1, "title": "For Those About To Rock We Salute You" }, - { "albumId": 4, "title": "Let There Be Rock" }, - ] - }, - { - "aggregates": { - "count": 2, - }, - "rows": [ - { "albumId": 2, "title": "Balls to the Wall" }, - { "albumId": 3, "title": "Restless and Wild" }, - ] - }, - ] - }]; + let expected_response = query_response() + .row_set( + row_set() + .aggregates([("count", json!({ "$numberInt": "2" }))]) + .rows([ + [ + ("albumId", json!(1)), + ("title", json!("For Those About To Rock We Salute You")), + ], + [("albumId", json!(4)), ("title", json!("Let There Be Rock"))], + ]), + ) + .row_set( + row_set() + .aggregates([("count", json!({ "$numberInt": "2" }))]) + .rows([ + [("albumId", json!(2)), ("title", json!("Balls to the Wall"))], + [("albumId", json!(3)), ("title", json!("Restless and Wild"))], + ]), + ) + .build(); let db = mock_collection_aggregate_response_for_pipeline( "tracks", @@ -357,63 +257,23 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &music_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) } #[tokio::test] - async fn executes_foreach_with_variables() -> Result<(), anyhow::Error> { - let query_request = QueryRequest { - foreach: None, - variables: Some( - (1..=12) - .map(|artist_id| [("artistId".to_owned(), json!(artist_id))].into()) - .collect(), - ), - target: dc_api_types::Target::TTable { - name: vec!["tracks".to_owned()], - arguments: Default::default(), - }, - relationships: Default::default(), - query: Box::new(Query { - r#where: Some(dc_api_types::Expression::ApplyBinaryComparison { - column: ComparisonColumn::new( - "int".to_owned(), - dc_api_types::ColumnSelector::Column("artistId".to_owned()), - ), - operator: BinaryComparisonOperator::Equal, - value: dc_api_types::ComparisonValue::Variable { - name: 
"artistId".to_owned(), - }, - }), - fields: Some( - [ - ( - "albumId".to_owned(), - Field::Column { - column: "albumId".to_owned(), - column_type: "int".to_owned(), - }, - ), - ( - "title".to_owned(), - Field::Column { - column: "title".to_owned(), - column_type: "string".to_owned(), - }, - ), - ] - .into(), - ), - aggregates: None, - aggregates_limit: None, - limit: None, - offset: None, - order_by: None, - }), - }; + async fn executes_request_with_more_than_ten_variable_sets() -> Result<(), anyhow::Error> { + let query_request = query_request() + .variables((1..=12).map(|artist_id| [("artistId", artist_id)])) + .collection("tracks") + .query( + query() + .predicate(binop("_eq", target!("artistId"), variable!(artistId))) + .fields([field!("albumId"), field!("title")]), + ) + .into(); fn facet(artist_id: i32) -> Bson { bson!([ @@ -462,27 +322,28 @@ mod tests { } ]); - let expected_response = vec![doc! { - "row_sets": [ - [ - { "albumId": 1, "title": "For Those About To Rock We Salute You" }, - { "albumId": 4, "title": "Let There Be Rock" } - ], - [], + let expected_response = query_response() + .row_set_rows([ [ - { "albumId": 2, "title": "Balls to the Wall" }, - { "albumId": 3, "title": "Restless and Wild" } + ("albumId", json!(1)), + ("title", json!("For Those About To Rock We Salute You")), ], - [], - [], - [], - [], - [], - [], - [], - [], - ] - }]; + [("albumId", json!(4)), ("title", json!("Let There Be Rock"))], + ]) + .empty_row_set() + .row_set_rows([ + [("albumId", json!(2)), ("title", json!("Balls to the Wall"))], + [("albumId", json!(3)), ("title", json!("Restless and Wild"))], + ]) + .empty_row_set() + .empty_row_set() + .empty_row_set() + .empty_row_set() + .empty_row_set() + .empty_row_set() + .empty_row_set() + .empty_row_set() + .build(); let db = mock_collection_aggregate_response_for_pipeline( "tracks", @@ -510,9 +371,29 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = 
execute_query_request(db, &music_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) } + + fn music_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: [collection("tracks")].into(), + object_types: [( + "tracks".into(), + object_type([ + ("albumId", named_type("Int")), + ("artistId", named_type("Int")), + ("title", named_type("String")), + ]), + )] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) + } } diff --git a/crates/mongodb-agent-common/src/query/make_selector.rs b/crates/mongodb-agent-common/src/query/make_selector.rs index 88317403..71ae8a98 100644 --- a/crates/mongodb-agent-common/src/query/make_selector.rs +++ b/crates/mongodb-agent-common/src/query/make_selector.rs @@ -1,147 +1,96 @@ use std::collections::BTreeMap; use anyhow::anyhow; -use dc_api_types::{ - ArrayComparisonValue, BinaryArrayComparisonOperator, ComparisonValue, ExistsInTable, - Expression, UnaryComparisonOperator, -}; use mongodb::bson::{self, doc, Document}; -use mongodb_support::BsonScalarType; +use ndc_models::UnaryComparisonOperator; use crate::{ - comparison_function::ComparisonFunction, interface_types::MongoAgentError, - query::column_ref::column_ref, query::serialization::json_to_bson_scalar, + interface_types::MongoAgentError, + mongo_query_plan::{ComparisonValue, ExistsInCollection, Expression, Type}, + query::column_ref::column_ref, }; -use BinaryArrayComparisonOperator as ArrOp; +use super::serialization::json_to_bson; + +pub type Result = std::result::Result; /// Convert a JSON Value into BSON using the provided type information. -/// Parses values of type "date" into BSON DateTime. 
-fn bson_from_scalar_value( - value: &serde_json::Value, - value_type: &str, -) -> Result { - let bson_type = BsonScalarType::from_bson_name(value_type).ok(); - match bson_type { - Some(t) => { - json_to_bson_scalar(t, value.clone()).map_err(|e| MongoAgentError::BadQuery(anyhow!(e))) - } - None => Err(MongoAgentError::InvalidScalarTypeName( - value_type.to_owned(), - )), - } +/// For example, parses values of type "Date" into BSON DateTime. +fn bson_from_scalar_value(value: &serde_json::Value, value_type: &Type) -> Result { + json_to_bson(value_type, value.clone()).map_err(|e| MongoAgentError::BadQuery(anyhow!(e))) } pub fn make_selector( variables: Option<&BTreeMap>, expr: &Expression, -) -> Result { - make_selector_helper(None, variables, expr) -} - -fn make_selector_helper( - in_table: Option<&str>, - variables: Option<&BTreeMap>, - expr: &Expression, -) -> Result { +) -> Result { match expr { Expression::And { expressions } => { let sub_exps: Vec = expressions .clone() .iter() - .map(|e| make_selector_helper(in_table, variables, e)) - .collect::>()?; + .map(|e| make_selector(variables, e)) + .collect::>()?; Ok(doc! {"$and": sub_exps}) } Expression::Or { expressions } => { let sub_exps: Vec = expressions .clone() .iter() - .map(|e| make_selector_helper(in_table, variables, e)) - .collect::>()?; + .map(|e| make_selector(variables, e)) + .collect::>()?; Ok(doc! {"$or": sub_exps}) } Expression::Not { expression } => { - Ok(doc! { "$nor": [make_selector_helper(in_table, variables, expression)?]}) + Ok(doc! { "$nor": [make_selector(variables, expression)?]}) } - Expression::Exists { in_table, r#where } => match in_table { - ExistsInTable::RelatedTable { relationship } => { - make_selector_helper(Some(relationship), variables, r#where) - } - ExistsInTable::UnrelatedTable { .. 
} => Err(MongoAgentError::NotImplemented( - "filtering on an unrelated table", - )), - }, - Expression::ApplyBinaryComparison { + Expression::Exists { + in_collection, + predicate, + } => Ok(match in_collection { + ExistsInCollection::Related { relationship } => match predicate { + Some(predicate) => doc! { + relationship: { "$elemMatch": make_selector(variables, predicate)? } + }, + None => doc! { format!("{relationship}.0"): { "$exists": true } }, + }, + ExistsInCollection::Unrelated { + unrelated_collection, + } => doc! { format!("$$ROOT.{unrelated_collection}.0"): { "$exists": true } }, + }), + Expression::BinaryComparisonOperator { column, operator, value, } => { - let mongo_op = ComparisonFunction::try_from(operator)?; - let col = column_ref(column, in_table)?; + let col = column_ref(column)?; let comparison_value = match value { - ComparisonValue::AnotherColumnComparison { .. } => Err( - MongoAgentError::NotImplemented("comparisons between columns"), - ), - ComparisonValue::ScalarValueComparison { value, value_type } => { + // TODO: MDB-152 To compare to another column we need to wrap the entire expression in + // an `$expr` aggregation operator (assuming the expression is not already in + // an aggregation expression context) + ComparisonValue::Column { .. 
} => Err(MongoAgentError::NotImplemented( + "comparisons between columns", + )), + ComparisonValue::Scalar { value, value_type } => { bson_from_scalar_value(value, value_type) } - ComparisonValue::Variable { name } => { - variable_to_mongo_expression(variables, name, &column.column_type) - .map(Into::into) - } + ComparisonValue::Variable { + name, + variable_type, + } => variable_to_mongo_expression(variables, name, variable_type).map(Into::into), }?; - Ok(mongo_op.mongodb_expression(col, comparison_value)) - } - Expression::ApplyBinaryArrayComparison { - column, - operator, - value_type, - values, - } => { - let mongo_op = match operator { - ArrOp::In => "$in", - ArrOp::CustomBinaryComparisonOperator(op) => op, - }; - let values: Vec = values - .iter() - .map(|value| match value { - ArrayComparisonValue::Scalar(value) => { - bson_from_scalar_value(value, value_type) - } - ArrayComparisonValue::Column(_column) => Err(MongoAgentError::NotImplemented( - "comparisons between columns", - )), - ArrayComparisonValue::Variable(name) => { - variable_to_mongo_expression(variables, name, value_type) - } - }) - .collect::>()?; - Ok(doc! { - column_ref(column, in_table)?: { - mongo_op: values - } - }) + Ok(operator.mongodb_expression(col.into_owned(), comparison_value)) } - Expression::ApplyUnaryComparison { column, operator } => match operator { + Expression::UnaryComparisonOperator { column, operator } => match operator { UnaryComparisonOperator::IsNull => { // Checks the type of the column - type 10 is the code for null. This differs from // `{ "$eq": null }` in that the checking equality with null returns true if the // value is null or is absent. Checking for type 10 returns true if the value is // null, but false if it is absent. Ok(doc! { - column_ref(column, in_table)?: { "$type": 10 } + column_ref(column)?: { "$type": 10 } }) } - UnaryComparisonOperator::CustomUnaryComparisonOperator(op) => { - let col = column_ref(column, in_table)?; - if op == "$exists" { - Ok(doc! 
{ col: { "$exists": true } }) - } else { - // TODO: Is `true` the proper value here? - Ok(doc! { col: { op: true } }) - } - } }, } } @@ -149,8 +98,8 @@ fn make_selector_helper( fn variable_to_mongo_expression( variables: Option<&BTreeMap>, variable: &str, - value_type: &str, -) -> Result { + value_type: &Type, +) -> Result { let value = variables .and_then(|vars| vars.get(variable)) .ok_or_else(|| MongoAgentError::VariableNotDefined(variable.to_owned()))?; diff --git a/crates/mongodb-agent-common/src/query/make_sort.rs b/crates/mongodb-agent-common/src/query/make_sort.rs index 2b2821a7..473dc017 100644 --- a/crates/mongodb-agent-common/src/query/make_sort.rs +++ b/crates/mongodb-agent-common/src/query/make_sort.rs @@ -1,30 +1,63 @@ +use itertools::Itertools; use mongodb::bson::{bson, Document}; +use ndc_models::OrderDirection; -use dc_api_types::{OrderBy, OrderByTarget, OrderDirection}; +use crate::{ + interface_types::MongoAgentError, + mongo_query_plan::{OrderBy, OrderByTarget}, + mongodb::sanitize::safe_name, +}; -pub fn make_sort(order_by: &OrderBy) -> Document { - let OrderBy { - elements, - relations: _, - } = order_by; +pub fn make_sort(order_by: &OrderBy) -> Result { + let OrderBy { elements } = order_by; elements .clone() .iter() - .filter_map(|obe| { + .map(|obe| { let direction = match obe.clone().order_direction { OrderDirection::Asc => bson!(1), OrderDirection::Desc => bson!(-1), }; - match obe.target { - OrderByTarget::Column { ref column } => Some((column.as_path(), direction)), + match &obe.target { + OrderByTarget::Column { + name, + field_path, + path, + } => Ok(( + column_ref_with_path(name, field_path.as_deref(), path)?, + direction, + )), OrderByTarget::SingleColumnAggregate { column: _, function: _, + path: _, result_type: _, - } => None, - OrderByTarget::StarCountAggregate {} => None, + } => + // TODO: MDB-150 + { + Err(MongoAgentError::NotImplemented( + "ordering by single column aggregate", + )) + } + OrderByTarget::StarCountAggregate { 
path: _ } => Err( + // TODO: MDB-151 + MongoAgentError::NotImplemented("ordering by star count aggregate"), + ), } }) .collect() } + +fn column_ref_with_path( + name: &String, + field_path: Option<&[String]>, + relation_path: &[String], +) -> Result { + relation_path + .iter() + .chain(std::iter::once(name)) + .chain(field_path.into_iter().flatten()) + .map(|x| safe_name(x)) + .process_results(|mut iter| iter.join(".")) +} diff --git a/crates/mongodb-agent-common/src/query/mod.rs b/crates/mongodb-agent-common/src/query/mod.rs index c86a012a..bf258c79 100644 --- a/crates/mongodb-agent-common/src/query/mod.rs +++ b/crates/mongodb-agent-common/src/query/mod.rs @@ -9,11 +9,10 @@ mod native_query; mod pipeline; mod query_target; mod relations; +pub mod response; pub mod serialization; -use configuration::Configuration; -use dc_api_types::QueryRequest; -use mongodb::bson; +use ndc_models::{QueryRequest, QueryResponse}; use self::execute_query_request::execute_query_request; pub use self::{ @@ -21,14 +20,17 @@ pub use self::{ make_sort::make_sort, pipeline::{is_response_faceted, pipeline_for_non_foreach, pipeline_for_query_request}, query_target::QueryTarget, + response::QueryResponseError, +}; +use crate::{ + interface_types::MongoAgentError, mongo_query_plan::MongoConfiguration, state::ConnectorState, }; -use crate::{interface_types::MongoAgentError, state::ConnectorState}; pub async fn handle_query_request( - config: &Configuration, + config: &MongoConfiguration, state: &ConnectorState, query_request: QueryRequest, -) -> Result, MongoAgentError> { +) -> Result { let database = state.database(); // This function delegates to another function which gives is a point to inject a mock database // implementation for testing. 
@@ -37,35 +39,38 @@ pub async fn handle_query_request( #[cfg(test)] mod tests { - use dc_api_types::QueryRequest; - use mongodb::bson::{self, bson, doc}; + use configuration::Configuration; + use mongodb::bson::{self, bson}; + use ndc_models::{QueryResponse, RowSet}; + use ndc_test_helpers::{ + binop, collection, column_aggregate, column_count_aggregate, field, named_type, + object_type, query, query_request, row_set, target, value, + }; use pretty_assertions::assert_eq; - use serde_json::{from_value, json}; + use serde_json::json; use super::execute_query_request; - use crate::mongodb::test_helpers::{ - mock_collection_aggregate_response, mock_collection_aggregate_response_for_pipeline, + use crate::{ + mongo_query_plan::MongoConfiguration, + mongodb::test_helpers::{ + mock_collection_aggregate_response, mock_collection_aggregate_response_for_pipeline, + }, }; #[tokio::test] async fn executes_query() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "student_gpa": { "type": "column", "column": "gpa", "column_type": "double" }, - }, - "where": { - "type": "binary_op", - "column": { "name": "gpa", "column_type": "double" }, - "operator": "less_than", - "value": { "type": "scalar", "value": 4.0, "value_type": "double" } - }, - }, - "target": {"name": ["students"], "type": "table"}, - "relationships": [], - }))?; + let query_request = query_request() + .collection("students") + .query( + query() + .fields([field!("student_gpa" => "gpa")]) + .predicate(binop("_lt", target!("gpa"), value!(4.0))), + ) + .into(); - let expected_response = vec![doc! { "student_gpa": 3.1 }, doc! 
{ "student_gpa": 3.6 }]; + let expected_response = row_set() + .rows([[("student_gpa", 3.1)], [("student_gpa", 3.6)]]) + .into_response(); let expected_pipeline = bson!([ { "$match": { "gpa": { "$lt": 4.0 } } }, @@ -81,39 +86,27 @@ mod tests { ]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &students_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) } #[tokio::test] async fn executes_aggregation() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "aggregates": { - "count": { - "type": "column_count", - "column": "gpa", - "distinct": true, - }, - "avg": { - "type": "single_column", - "column": "gpa", - "function": "avg", - "result_type": "double", - }, - }, - }, - "target": {"name": ["students"], "type": "table"}, - "relationships": [], - }))?; + let query_request = query_request() + .collection("students") + .query(query().aggregates([ + column_count_aggregate!("count" => "gpa", distinct: true), + column_aggregate!("avg" => "gpa", "avg"), + ])) + .into(); - let expected_response = vec![doc! 
{ - "aggregates": { - "count": 11, - "avg": 3, - } - }]; + let expected_response = row_set() + .aggregates([ + ("count", json!({ "$numberInt": "11" })), + ("avg", json!({ "$numberInt": "3" })), + ]) + .into_response(); let expected_pipeline = bson!([ { @@ -156,45 +149,27 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; - assert_eq!(expected_response, result); + let result = execute_query_request(db, &students_config(), query_request).await?; + assert_eq!(result, expected_response); Ok(()) } #[tokio::test] async fn executes_aggregation_with_fields() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "aggregates": { - "avg": { - "type": "single_column", - "column": "gpa", - "function": "avg", - "result_type": "double", - }, - }, - "fields": { - "student_gpa": { "type": "column", "column": "gpa", "column_type": "double" }, - }, - "where": { - "type": "binary_op", - "column": { "name": "gpa", "column_type": "double" }, - "operator": "less_than", - "value": { "type": "scalar", "value": 4.0, "value_type": "double" } - }, - }, - "target": {"name": ["students"], "type": "table"}, - "relationships": [], - }))?; + let query_request = query_request() + .collection("students") + .query( + query() + .aggregates([column_aggregate!("avg" => "gpa", "avg")]) + .fields([field!("student_gpa" => "gpa")]) + .predicate(binop("_lt", target!("gpa"), value!(4.0))), + ) + .into(); - let expected_response = vec![doc! 
{ - "aggregates": { - "avg": 3.1, - }, - "rows": [{ - "gpa": 3.1, - }], - }]; + let expected_response = row_set() + .aggregates([("avg", json!({ "$numberDouble": "3.1" }))]) + .row([("student_gpa", 3.1)]) + .into_response(); let expected_pipeline = bson!([ { "$match": { "gpa": { "$lt": 4.0 } } }, @@ -232,39 +207,30 @@ mod tests { "avg": 3.1, }, "rows": [{ - "gpa": 3.1, + "student_gpa": 3.1, }], }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; - assert_eq!(expected_response, result); + let result = execute_query_request(db, &students_config(), query_request).await?; + assert_eq!(result, expected_response); Ok(()) } #[tokio::test] async fn converts_date_inputs_to_bson() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "date": { "type": "column", "column": "date", "column_type": "date", }, - }, - "where": { - "type": "binary_op", - "column": { "column_type": "date", "name": "date" }, - "operator": "greater_than_or_equal", - "value": { - "type": "scalar", - "value": "2018-08-14T07:05-0800", - "value_type": "date" - } - } - }, - "target": { "type": "table", "name": [ "comments" ] }, - "relationships": [] - }))?; + let query_request = query_request() + .collection("comments") + .query(query().fields([field!("date")]).predicate(binop( + "_gte", + target!("date"), + value!("2018-08-14T07:05-0800"), + ))) + .into(); - let expected_response = vec![doc! 
{ "date": "2018-08-14T15:05:03.142Z" }]; + let expected_response = row_set() + .row([("date", "2018-08-14T15:05:00.000000000Z")]) + .into_response(); let expected_pipeline = bson!([ { @@ -274,11 +240,7 @@ mod tests { }, { "$replaceWith": { - "date": { - "$dateToString": { - "date": { "$ifNull": ["$date", null] }, - }, - }, + "date": { "$ifNull": ["$date", null] }, } }, ]); @@ -287,33 +249,63 @@ mod tests { "comments", expected_pipeline, bson!([{ - "date": "2018-08-14T15:05:03.142Z", + "date": bson::DateTime::builder().year(2018).month(8).day(14).hour(15).minute(5).build().unwrap(), }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &comments_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) } #[tokio::test] async fn parses_empty_response() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "date": { "type": "column", "column": "date", "column_type": "date", }, - }, - }, - "target": { "type": "table", "name": [ "comments" ] }, - "relationships": [], - }))?; + let query_request = query_request() + .collection("comments") + .query(query().fields([field!("date")])) + .into(); - let expected_response: Vec = vec![]; + let expected_response = QueryResponse(vec![RowSet { + aggregates: None, + rows: Some(vec![]), + }]); let db = mock_collection_aggregate_response("comments", bson!([])); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &comments_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) } + + fn students_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: [collection("students")].into(), + object_types: [( + "students".into(), + object_type([("gpa", named_type("Double"))]), + )] + .into(), + functions: Default::default(), + procedures: Default::default(), + 
native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) + } + + fn comments_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: [collection("comments")].into(), + object_types: [( + "comments".into(), + object_type([("date", named_type("Date"))]), + )] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) + } } diff --git a/crates/mongodb-agent-common/src/query/native_query.rs b/crates/mongodb-agent-common/src/query/native_query.rs index 85f70d95..0df1fbf6 100644 --- a/crates/mongodb-agent-common/src/query/native_query.rs +++ b/crates/mongodb-agent-common/src/query/native_query.rs @@ -1,13 +1,15 @@ -use std::collections::HashMap; +use std::collections::BTreeMap; -use configuration::{native_query::NativeQuery, Configuration}; -use dc_api_types::{Argument, QueryRequest, VariableSet}; +use configuration::native_query::NativeQuery; use itertools::Itertools as _; +use ndc_models::Argument; +use ndc_query_plan::VariableSet; use crate::{ interface_types::MongoAgentError, + mongo_query_plan::{MongoConfiguration, QueryPlan}, mongodb::{Pipeline, Stage}, - mutation::{interpolated_command, MutationError}, + procedure::{interpolated_command, ProcedureError}, }; use super::{arguments::resolve_arguments, query_target::QueryTarget}; @@ -15,9 +17,9 @@ use super::{arguments::resolve_arguments, query_target::QueryTarget}; /// Returns either the pipeline defined by a native query with variable bindings for arguments, or /// an empty pipeline if the query request target is not a native query pub fn pipeline_for_native_query( - config: &Configuration, + config: &MongoConfiguration, variables: Option<&VariableSet>, - query_request: &QueryRequest, + query_request: &QueryPlan, ) -> Result { match QueryTarget::for_request(config, query_request) { 
QueryTarget::Collection(_) => Ok(Pipeline::empty()), @@ -25,15 +27,14 @@ pub fn pipeline_for_native_query( native_query, arguments, .. - } => make_pipeline(config, variables, native_query, arguments), + } => make_pipeline(variables, native_query, arguments), } } fn make_pipeline( - config: &Configuration, variables: Option<&VariableSet>, native_query: &NativeQuery, - arguments: &HashMap, + arguments: &BTreeMap, ) -> Result { let expressions = arguments .iter() @@ -45,9 +46,8 @@ fn make_pipeline( }) .try_collect()?; - let bson_arguments = - resolve_arguments(&config.object_types, &native_query.arguments, expressions) - .map_err(MutationError::UnresolvableArguments)?; + let bson_arguments = resolve_arguments(&native_query.arguments, expressions) + .map_err(ProcedureError::UnresolvableArguments)?; // Replace argument placeholders with resolved expressions, convert document list to // a `Pipeline` value @@ -71,29 +71,26 @@ fn argument_to_mongodb_expression( .ok_or_else(|| MongoAgentError::VariableNotDefined(name.to_owned())) .cloned(), Argument::Literal { value } => Ok(value.clone()), - // TODO: Column references are needed for native queries that are a target of a relation. - // MDB-106 - Argument::Column { .. 
} => Err(MongoAgentError::NotImplemented( - "column references in native queries are not currently implemented", - )), } } #[cfg(test)] mod tests { use configuration::{ - native_query::{NativeQuery, NativeQueryRepresentation}, + native_query::NativeQueryRepresentation, schema::{ObjectField, ObjectType, Type}, + serialized::NativeQuery, Configuration, }; - use dc_api_test_helpers::{column, query, query_request}; - use dc_api_types::Argument; use mongodb::bson::{bson, doc}; use mongodb_support::BsonScalarType as S; + use ndc_models::Argument; + use ndc_test_helpers::{field, query, query_request, row_set}; use pretty_assertions::assert_eq; use serde_json::json; use crate::{ + mongo_query_plan::MongoConfiguration, mongodb::test_helpers::mock_aggregate_response_for_pipeline, query::execute_query_request, }; @@ -134,6 +131,44 @@ mod tests { ] .into(), result_document_type: "VectorResult".to_owned(), + object_types: [( + "VectorResult".to_owned(), + ObjectType { + description: None, + fields: [ + ( + "_id".to_owned(), + ObjectField { + r#type: Type::Scalar(S::ObjectId), + description: None, + }, + ), + ( + "title".to_owned(), + ObjectField { + r#type: Type::Scalar(S::String), + description: None, + }, + ), + ( + "genres".to_owned(), + ObjectField { + r#type: Type::ArrayOf(Box::new(Type::Scalar(S::String))), + description: None, + }, + ), + ( + "year".to_owned(), + ObjectField { + r#type: Type::Scalar(S::Int), + description: None, + }, + ), + ] + .into(), + }, + )] + .into(), pipeline: vec![doc! 
{ "$vectorSearch": { "index": "movie-vector-index", @@ -147,95 +182,47 @@ mod tests { description: None, }; - let object_types = [( - "VectorResult".to_owned(), - ObjectType { - description: None, - fields: [ - ( - "_id".to_owned(), - ObjectField { - r#type: Type::Scalar(S::ObjectId), - description: None, - }, - ), - ( - "title".to_owned(), - ObjectField { - r#type: Type::Scalar(S::ObjectId), - description: None, - }, - ), - ( - "genres".to_owned(), - ObjectField { - r#type: Type::ArrayOf(Box::new(Type::Scalar(S::String))), - description: None, - }, - ), - ( - "year".to_owned(), - ObjectField { - r#type: Type::Scalar(S::Int), - description: None, - }, - ), - ] - .into(), - }, - )] - .into(); - - let config = Configuration { - native_queries: [("vectorSearch".to_owned(), native_query.clone())].into(), - object_types, - collections: Default::default(), - functions: Default::default(), - mutations: Default::default(), - native_mutations: Default::default(), - options: Default::default(), - }; + let config = MongoConfiguration(Configuration::validate( + Default::default(), + Default::default(), + [("vectorSearch".into(), native_query)].into(), + Default::default(), + )?); let request = query_request() - .target_with_arguments( - ["vectorSearch"], - [ - ( - "filter", - Argument::Literal { - value: json!({ - "$and": [ - { - "genres": { - "$nin": [ - "Drama", "Western", "Crime" - ], - "$in": [ - "Action", "Adventure", "Family" - ] - } - }, { - "year": { "$gte": 1960, "$lte": 2000 } + .collection("vectorSearch") + .arguments([ + ( + "filter", + Argument::Literal { + value: json!({ + "$and": [ + { + "genres": { + "$nin": [ + "Drama", "Western", "Crime" + ], + "$in": [ + "Action", "Adventure", "Family" + ] } - ] - }), - }, - ), - ( - "queryVector", - Argument::Literal { - value: json!([-0.020156775, -0.024996493, 0.010778184]), - }, - ), - ("numCandidates", Argument::Literal { value: json!(200) }), - ("limit", Argument::Literal { value: json!(10) }), - ], - ) - 
.query(query().fields([ - column!("title": "String"), - column!("genres": "String"), - column!("year": "String"), - ])) + }, { + "year": { "$gte": 1960, "$lte": 2000 } + } + ] + }), + }, + ), + ( + "queryVector", + Argument::Literal { + value: json!([-0.020156775, -0.024996493, 0.010778184]), + }, + ), + ("numCandidates", Argument::Literal { value: json!(200) }), + ("limit", Argument::Literal { value: json!(10) }), + ]) + .query(query().fields([field!("title"), field!("genres"), field!("year")])) .into(); let expected_pipeline = bson!([ @@ -273,10 +260,20 @@ mod tests { }, ]); - let expected_response = vec![ - doc! { "title": "Beau Geste", "year": 1926, "genres": ["Action", "Adventure", "Drama"] }, - doc! { "title": "For Heaven's Sake", "year": 1926, "genres": ["Action", "Comedy", "Romance"] }, - ]; + let expected_response = row_set() + .rows([ + [ + ("title", json!("Beau Geste")), + ("year", json!(1926)), + ("genres", json!(["Action", "Adventure", "Drama"])), + ], + [ + ("title", json!("For Heaven's Sake")), + ("year", json!(1926)), + ("genres", json!(["Action", "Comedy", "Romance"])), + ], + ]) + .into_response(); let db = mock_aggregate_response_for_pipeline( expected_pipeline, diff --git a/crates/mongodb-agent-common/src/query/pipeline.rs b/crates/mongodb-agent-common/src/query/pipeline.rs index ed67c2ac..260be737 100644 --- a/crates/mongodb-agent-common/src/query/pipeline.rs +++ b/crates/mongodb-agent-common/src/query/pipeline.rs @@ -1,18 +1,19 @@ use std::collections::BTreeMap; -use configuration::Configuration; -use dc_api_types::{Aggregate, Query, QueryRequest, VariableSet}; use mongodb::bson::{self, doc, Bson}; +use ndc_query_plan::VariableSet; +use tracing::instrument; use crate::{ aggregation_function::AggregationFunction, interface_types::MongoAgentError, + mongo_query_plan::{Aggregate, MongoConfiguration, Query, QueryPlan}, mongodb::{sanitize::get_field, Accumulator, Pipeline, Selection, Stage}, }; use super::{ constants::{RESULT_FIELD, ROWS_FIELD}, - 
foreach::{foreach_variants, pipeline_for_foreach}, + foreach::pipeline_for_foreach, make_selector, make_sort, native_query::pipeline_for_native_query, relations::pipeline_for_relations, @@ -25,25 +26,22 @@ use super::{ /// one) in a single facet stage. If we have fields, and no aggregates then the fields pipeline /// can instead be appended to `pipeline`. pub fn is_response_faceted(query: &Query) -> bool { - match &query.aggregates { - Some(aggregates) => !aggregates.is_empty(), - _ => false, - } + query.has_aggregates() } /// Shared logic to produce a MongoDB aggregation pipeline for a query request. /// /// Returns a pipeline paired with a value that indicates whether the response requires /// post-processing in the agent. +#[instrument(name = "Build Query Pipeline" skip_all, fields(internal.visibility = "user"))] pub fn pipeline_for_query_request( - config: &Configuration, - query_request: &QueryRequest, + config: &MongoConfiguration, + query_plan: &QueryPlan, ) -> Result { - let foreach = foreach_variants(query_request); - if let Some(foreach) = foreach { - pipeline_for_foreach(foreach, config, query_request) + if let Some(variable_sets) = &query_plan.variables { + pipeline_for_foreach(variable_sets, config, query_plan) } else { - pipeline_for_non_foreach(config, None, query_request) + pipeline_for_non_foreach(config, None, query_plan) } } @@ -53,31 +51,35 @@ pub fn pipeline_for_query_request( /// Returns a pipeline paired with a value that indicates whether the response requires /// post-processing in the agent. pub fn pipeline_for_non_foreach( - config: &Configuration, + config: &MongoConfiguration, variables: Option<&VariableSet>, - query_request: &QueryRequest, + query_plan: &QueryPlan, ) -> Result { - let query = &*query_request.query; + let query = &query_plan.query; let Query { offset, order_by, - r#where, + predicate, .. 
} = query; let mut pipeline = Pipeline::empty(); // If this is a native query then we start with the native query's pipeline - pipeline.append(pipeline_for_native_query(config, variables, query_request)?); + pipeline.append(pipeline_for_native_query(config, variables, query_plan)?); // Stages common to aggregate and row queries. - pipeline.append(pipeline_for_relations(config, variables, query_request)?); + pipeline.append(pipeline_for_relations(config, variables, query_plan)?); - let match_stage = r#where + let match_stage = predicate .as_ref() .map(|expression| make_selector(variables, expression)) .transpose()? .map(Stage::Match); - let sort_stage: Option = order_by.iter().map(|o| Stage::Sort(make_sort(o))).next(); + let sort_stage: Option = order_by + .iter() + .map(|o| Ok(Stage::Sort(make_sort(o)?)) as Result<_, MongoAgentError>) + .next() + .transpose()?; let skip_stage = offset.map(Stage::Skip); [match_stage, sort_stage, skip_stage] @@ -89,12 +91,12 @@ pub fn pipeline_for_non_foreach( // sort and limit stages if we are requesting rows only. In both cases the last stage is // a $replaceWith. let diverging_stages = if is_response_faceted(query) { - let (facet_pipelines, select_facet_results) = facet_pipelines_for_query(query_request)?; + let (facet_pipelines, select_facet_results) = facet_pipelines_for_query(query_plan)?; let aggregation_stages = Stage::Facet(facet_pipelines); let replace_with_stage = Stage::ReplaceWith(select_facet_results); Pipeline::from_iter([aggregation_stages, replace_with_stage]) } else { - pipeline_for_fields_facet(query_request)? + pipeline_for_fields_facet(query_plan)? }; pipeline.append(diverging_stages); @@ -105,14 +107,11 @@ pub fn pipeline_for_non_foreach( /// within a $facet stage. We assume that the query's `where`, `order_by`, `offset` criteria (which /// are shared with aggregates) have already been applied, and that we have already joined /// relations. 
-pub fn pipeline_for_fields_facet( - query_request: &QueryRequest, -) -> Result { - let Query { limit, .. } = &*query_request.query; +pub fn pipeline_for_fields_facet(query_plan: &QueryPlan) -> Result { + let Query { limit, .. } = &query_plan.query; let limit_stage = limit.map(Stage::Limit); - let replace_with_stage: Stage = - Stage::ReplaceWith(Selection::from_query_request(query_request)?); + let replace_with_stage: Stage = Stage::ReplaceWith(Selection::from_query_request(query_plan)?); Ok(Pipeline::from_iter( [limit_stage, replace_with_stage.into()] @@ -125,9 +124,9 @@ pub fn pipeline_for_fields_facet( /// a `Selection` that converts results of each pipeline to a format compatible with /// `QueryResponse`. fn facet_pipelines_for_query( - query_request: &QueryRequest, + query_plan: &QueryPlan, ) -> Result<(BTreeMap, Selection), MongoAgentError> { - let query = &*query_request.query; + let query = &query_plan.query; let Query { aggregates, aggregates_limit, @@ -146,7 +145,7 @@ fn facet_pipelines_for_query( .collect::, MongoAgentError>>()?; if fields.is_some() { - let fields_pipeline = pipeline_for_fields_facet(query_request)?; + let fields_pipeline = pipeline_for_fields_facet(query_plan)?; facet_pipelines.insert(ROWS_FIELD.to_owned(), fields_pipeline); } @@ -197,7 +196,7 @@ fn facet_pipelines_for_query( fn pipeline_for_aggregate( aggregate: Aggregate, - limit: Option, + limit: Option, ) -> Result { // Group expressions use a dollar-sign prefix to indicate a reference to a document field. // TODO: I don't think we need sanitizing, but I could use a second opinion -Jesse H. @@ -250,7 +249,7 @@ fn pipeline_for_aggregate( } => { use AggregationFunction::*; - let accumulator = match AggregationFunction::from_graphql_name(&function)? 
{ + let accumulator = match function { Avg => Accumulator::Avg(field_ref(&column)), Count => Accumulator::Count, Min => Accumulator::Min(field_ref(&column)), diff --git a/crates/mongodb-agent-common/src/query/query_target.rs b/crates/mongodb-agent-common/src/query/query_target.rs index 25c62442..ab4f53bc 100644 --- a/crates/mongodb-agent-common/src/query/query_target.rs +++ b/crates/mongodb-agent-common/src/query/query_target.rs @@ -1,7 +1,9 @@ -use std::{collections::HashMap, fmt::Display}; +use std::{collections::BTreeMap, fmt::Display}; -use configuration::{native_query::NativeQuery, Configuration}; -use dc_api_types::{Argument, QueryRequest}; +use configuration::native_query::NativeQuery; +use ndc_models::Argument; + +use crate::mongo_query_plan::{MongoConfiguration, QueryPlan}; #[derive(Clone, Debug)] pub enum QueryTarget<'a> { @@ -9,24 +11,23 @@ pub enum QueryTarget<'a> { NativeQuery { name: String, native_query: &'a NativeQuery, - arguments: &'a HashMap, + arguments: &'a BTreeMap, }, } impl QueryTarget<'_> { pub fn for_request<'a>( - config: &'a Configuration, - query_request: &'a QueryRequest, + config: &'a MongoConfiguration, + query_request: &'a QueryPlan, ) -> QueryTarget<'a> { - let target = &query_request.target; - let target_name = target.name().join("."); - match config.native_queries.get(&target_name) { + let collection = &query_request.collection; + match config.native_queries().get(collection) { Some(native_query) => QueryTarget::NativeQuery { - name: target_name, + name: collection.to_owned(), native_query, - arguments: target.arguments(), + arguments: &query_request.arguments, }, - None => QueryTarget::Collection(target_name), + None => QueryTarget::Collection(collection.to_owned()), } } diff --git a/crates/mongodb-agent-common/src/query/relations.rs b/crates/mongodb-agent-common/src/query/relations.rs index ad2906c8..3024cd12 100644 --- a/crates/mongodb-agent-common/src/query/relations.rs +++ b/crates/mongodb-agent-common/src/query/relations.rs 
@@ -1,13 +1,11 @@ -use std::collections::HashMap; +use std::collections::BTreeMap; -use anyhow::anyhow; -use configuration::Configuration; -use dc_api_types::comparison_column::ColumnSelector; -use dc_api_types::relationship::ColumnMapping; -use dc_api_types::{Field, QueryRequest, Relationship, VariableSet}; +use itertools::Itertools as _; use mongodb::bson::{doc, Bson, Document}; +use ndc_query_plan::VariableSet; -use crate::mongodb::sanitize::safe_column_selector; +use crate::mongo_query_plan::{MongoConfiguration, Query, QueryPlan}; +use crate::mongodb::sanitize::safe_name; use crate::mongodb::Pipeline; use crate::{ interface_types::MongoAgentError, @@ -16,156 +14,57 @@ use crate::{ use super::pipeline::pipeline_for_non_foreach; -pub fn pipeline_for_relations( - config: &Configuration, - variables: Option<&VariableSet>, - query_request: &QueryRequest, -) -> Result { - let QueryRequest { - target, - relationships, - query, - .. - } = query_request; +type Result = std::result::Result; - let empty_field_map = HashMap::new(); - let fields = if let Some(fs) = &query.fields { - fs - } else { - &empty_field_map - }; - - let empty_relation_map = HashMap::new(); - let relationships = &relationships - .iter() - .find_map(|rels| { - if &rels.source_table == target.name() { - Some(&rels.relationships) - } else { - None - } - }) - .unwrap_or(&empty_relation_map); - - let stages = lookups_for_fields(config, query_request, variables, relationships, &[], fields)?; - Ok(Pipeline::new(stages)) -} - -/// Produces $lookup stages for any necessary joins -fn lookups_for_fields( - config: &Configuration, - query_request: &QueryRequest, +/// Defines any necessary $lookup stages for the given section of the pipeline. This is called for +/// each sub-query in the plan. 
+pub fn pipeline_for_relations( + config: &MongoConfiguration, variables: Option<&VariableSet>, - relationships: &HashMap, - parent_columns: &[&str], - fields: &HashMap, -) -> Result, MongoAgentError> { - let stages = fields + query_plan: &QueryPlan, +) -> Result { + let QueryPlan { query, .. } = query_plan; + let Query { relationships, .. } = query; + + // Lookup stages perform the join for each relationship, and assign the list of rows or mapping + // of aggregate results to a field in the parent document. + let lookup_stages = relationships .iter() - .map(|(field_name, field)| { - lookups_for_field( - config, - query_request, - variables, - relationships, - parent_columns, - field_name, - field, - ) - }) - .collect::>, MongoAgentError>>()? - .into_iter() - .flatten() - .collect(); - Ok(stages) -} - -/// Produces $lookup stages for any necessary joins -fn lookups_for_field( - config: &Configuration, - query_request: &QueryRequest, - variables: Option<&VariableSet>, - relationships: &HashMap, - parent_columns: &[&str], - field_name: &str, - field: &Field, -) -> Result, MongoAgentError> { - match field { - Field::Column { .. 
} => Ok(vec![]), - Field::NestedObject { column, query } => { - let nested_parent_columns = append_to_path(parent_columns, column); - let fields = query.fields.clone().unwrap_or_default(); - lookups_for_fields( - config, - query_request, - variables, - relationships, - &nested_parent_columns, - &fields, - ) - .map(Into::into) - } - Field::NestedArray { - field, - // NOTE: We can use a $slice in our selection to do offsets and limits: - // https://www.mongodb.com/docs/manual/reference/operator/projection/slice/#mongodb-projection-proj.-slice - limit: _, - offset: _, - r#where: _, - } => lookups_for_field( - config, - query_request, - variables, - relationships, - parent_columns, - field_name, - field, - ), - Field::Relationship { - query, - relationship: relationship_name, - } => { - let r#as = match parent_columns { - [] => field_name.to_owned(), - _ => format!("{}.{}", parent_columns.join("."), field_name), - }; - - let Relationship { - column_mapping, - target, - .. - } = get_relationship(relationships, relationship_name)?; - let from = collection_reference(target.name())?; - + .map(|(name, relationship)| { // Recursively build pipeline according to relation query let lookup_pipeline = pipeline_for_non_foreach( config, variables, - &QueryRequest { - query: query.clone(), - target: target.clone(), - ..query_request.clone() + &QueryPlan { + query: relationship.query.clone(), + collection: relationship.target_collection.clone(), + ..query_plan.clone() }, )?; - let lookup = make_lookup_stage(from, column_mapping, r#as, lookup_pipeline)?; + make_lookup_stage( + relationship.target_collection.clone(), + &relationship.column_mapping, + name.to_owned(), + lookup_pipeline, + ) + }) + .try_collect()?; - Ok(vec![lookup]) - } - } + Ok(lookup_stages) } fn make_lookup_stage( from: String, - column_mapping: &ColumnMapping, + column_mapping: &BTreeMap, r#as: String, lookup_pipeline: Pipeline, -) -> Result { +) -> Result { // If we are mapping a single field in the source 
collection to a single field in the target // collection then we can use the correlated subquery syntax. - if column_mapping.0.len() == 1 { + if column_mapping.len() == 1 { // Safe to unwrap because we just checked the hashmap size - let (source_selector, target_selector) = column_mapping.0.iter().next().unwrap(); + let (source_selector, target_selector) = column_mapping.iter().next().unwrap(); single_column_mapping_lookup( from, source_selector, @@ -180,15 +79,15 @@ fn make_lookup_stage( fn single_column_mapping_lookup( from: String, - source_selector: &ColumnSelector, - target_selector: &ColumnSelector, + source_selector: &str, + target_selector: &str, r#as: String, lookup_pipeline: Pipeline, -) -> Result { +) -> Result { Ok(Stage::Lookup { from: Some(from), - local_field: Some(safe_column_selector(source_selector)?.to_string()), - foreign_field: Some(safe_column_selector(target_selector)?.to_string()), + local_field: Some(safe_name(source_selector)?.into_owned()), + foreign_field: Some(safe_name(target_selector)?.into_owned()), r#let: None, pipeline: if lookup_pipeline.is_empty() { None @@ -201,37 +100,35 @@ fn single_column_mapping_lookup( fn multiple_column_mapping_lookup( from: String, - column_mapping: &ColumnMapping, + column_mapping: &BTreeMap, r#as: String, lookup_pipeline: Pipeline, -) -> Result { +) -> Result { let let_bindings: Document = column_mapping - .0 .keys() .map(|local_field| { Ok(( - variable(&local_field.as_var())?, - Bson::String(format!("${}", safe_column_selector(local_field)?)), + variable(local_field)?, + Bson::String(format!("${}", safe_name(local_field)?.into_owned())), )) }) - .collect::>()?; + .collect::>()?; // Creating an intermediate Vec and sorting it is done just to help with testing. // A stable order for matchers makes it easier to assert equality between actual // and expected pipelines. 
- let mut column_pairs: Vec<(&ColumnSelector, &ColumnSelector)> = - column_mapping.0.iter().collect(); + let mut column_pairs: Vec<(&String, &String)> = column_mapping.iter().collect(); column_pairs.sort(); let matchers: Vec = column_pairs .into_iter() .map(|(local_field, remote_field)| { Ok(doc! { "$eq": [ - format!("$${}", variable(&local_field.as_var())?), - format!("${}", safe_column_selector(remote_field)?) + format!("$${}", variable(local_field)?), + format!("${}", safe_name(remote_field)?) ] }) }) - .collect::>()?; + .collect::>()?; // Match only documents on the right side of the join that match the column-mapping // criteria. In the case where we have only one column mapping using the $lookup stage's @@ -255,83 +152,51 @@ fn multiple_column_mapping_lookup( }) } -/// Transform an Agent IR qualified table reference into a MongoDB collection reference. -fn collection_reference(table_ref: &[String]) -> Result { - if table_ref.len() == 1 { - Ok(table_ref[0].clone()) - } else { - Err(MongoAgentError::BadQuery(anyhow!( - "expected \"from\" field of relationship to contain one element" - ))) - } -} - -fn get_relationship<'a>( - relationships: &'a HashMap, - relationship_name: &str, -) -> Result<&'a Relationship, MongoAgentError> { - match relationships.get(relationship_name) { - Some(relationship) => Ok(relationship), - None => Err(MongoAgentError::UnspecifiedRelation( - relationship_name.to_owned(), - )), - } -} - -fn append_to_path<'a, 'b, 'c>(parent_columns: &'a [&'b str], column: &'c str) -> Vec<&'c str> -where - 'b: 'c, -{ - parent_columns.iter().copied().chain(Some(column)).collect() -} - #[cfg(test)] mod tests { - use dc_api_types::QueryRequest; - use mongodb::bson::{bson, doc, Bson}; + use configuration::Configuration; + use mongodb::bson::{bson, Bson}; + use ndc_test_helpers::{ + binop, collection, exists, field, named_type, object_type, query, query_request, + relation_field, relationship, row_set, star_count_aggregate, target, value, + }; use 
pretty_assertions::assert_eq; - use serde_json::{from_value, json}; + use serde_json::json; use super::super::execute_query_request; - use crate::mongodb::test_helpers::mock_collection_aggregate_response_for_pipeline; + use crate::{ + mongo_query_plan::MongoConfiguration, + mongodb::test_helpers::mock_collection_aggregate_response_for_pipeline, + }; #[tokio::test] async fn looks_up_an_array_relation() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "class_title": { "type": "column", "column": "title", "column_type": "string" }, - "students": { - "type": "relationship", - "query": { - "fields": { - "student_name": { "type": "column", "column": "name", "column_type": "string" }, - }, - }, - "relationship": "class_students", - }, - }, - }, - "target": {"name": ["classes"], "type": "table"}, - "relationships": [{ - "source_table": ["classes"], - "relationships": { - "class_students": { - "column_mapping": { "_id": "classId" }, - "relationship_type": "array", - "target": { "name": ["students"], "type": "table"}, - }, - }, - }], - }))?; - - let expected_response = vec![doc! 
{ - "class_title": "MongoDB 101", - "students": { "rows": [ - { "student_name": "Alice" }, - { "student_name": "Bob" }, - ] }, - }]; + let query_request = query_request() + .collection("classes") + .query(query().fields([ + field!("class_title" => "title"), + relation_field!("students" => "class_students", query().fields([ + field!("student_name" => "name") + ])), + ])) + .relationships([( + "class_students", + relationship("students", [("_id", "classId")]), + )]) + .into(); + + let expected_response = row_set() + .row([ + ("class_title", json!("MongoDB 101")), + ( + "students", + json!({ "rows": [ + { "student_name": "Alice" }, + { "student_name": "Bob" }, + ]}), + ), + ]) + .into_response(); let expected_pipeline = bson!([ { @@ -346,7 +211,7 @@ mod tests { }, } ], - "as": "students", + "as": "class_students", }, }, { @@ -354,7 +219,7 @@ mod tests { "class_title": { "$ifNull": ["$title", null] }, "students": { "rows": { - "$getField": { "$literal": "students" }, + "$getField": { "$literal": "class_students" }, }, }, }, @@ -373,7 +238,7 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &students_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) @@ -381,44 +246,38 @@ mod tests { #[tokio::test] async fn looks_up_an_object_relation() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "student_name": { "type": "column", "column": "name", "column_type": "string" }, - "class": { - "type": "relationship", - "query": { - "fields": { - "class_title": { "type": "column", "column": "title", "column_type": "string" }, - }, - }, - "relationship": "student_class", - }, - }, - }, - "target": {"name": ["students"], "type": "table"}, - "relationships": [{ - "source_table": ["students"], - "relationships": { - "student_class": { - "column_mapping": { "classId": "_id" }, - "relationship_type": 
"object", - "target": {"name": ["classes"], "type": "table"}, - }, - }, - }], - }))?; - - let expected_response = vec![ - doc! { - "student_name": "Alice", - "class": { "rows": [{ "class_title": "MongoDB 101" }] }, - }, - doc! { - "student_name": "Bob", - "class": { "rows": [{ "class_title": "MongoDB 101" }] }, - }, - ]; + let query_request = query_request() + .collection("students") + .query(query().fields([ + field!("student_name" => "name"), + relation_field!("class" => "student_class", query().fields([ + field!("class_title" => "title") + ])), + ])) + .relationships([( + "student_class", + relationship("classes", [("classId", "_id")]), + )]) + .into(); + + let expected_response = row_set() + .rows([ + [ + ("student_name", json!("Alice")), + ( + "class", + json!({ "rows": [{ "class_title": "MongoDB 101" }] }), + ), + ], + [ + ("student_name", json!("Bob")), + ( + "class", + json!({ "rows": [{ "class_title": "MongoDB 101" }] }), + ), + ], + ]) + .into_response(); let expected_pipeline = bson!([ { @@ -433,14 +292,14 @@ mod tests { }, } ], - "as": "class", + "as": "student_class", }, }, { "$replaceWith": { "student_name": { "$ifNull": ["$name", null] }, "class": { "rows": { - "$getField": { "$literal": "class" } } + "$getField": { "$literal": "student_class" } } }, }, }, @@ -461,7 +320,7 @@ mod tests { ]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &students_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) @@ -469,41 +328,32 @@ mod tests { #[tokio::test] async fn looks_up_a_relation_with_multiple_column_mappings() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "class_title": { "type": "column", "column": "title", "column_type": "string" }, - "students": { - "type": "relationship", - "query": { - "fields": { - "student_name": { "type": "column", "column": "name", "column_type": 
"string" }, - }, - }, - "relationship": "students", - }, - }, - }, - "target": {"name": ["classes"], "type": "table"}, - "relationships": [{ - "source_table": ["classes"], - "relationships": { - "students": { - "column_mapping": { "title": "class_title", "year": "year" }, - "relationship_type": "array", - "target": {"name": ["students"], "type": "table"}, - }, - }, - }], - }))?; - - let expected_response = vec![doc! { - "class_title": "MongoDB 101", - "students": { "rows": [ - { "student_name": "Alice" }, - { "student_name": "Bob" }, - ] }, - }]; + let query_request = query_request() + .collection("classes") + .query(query().fields([ + field!("class_title" => "title"), + relation_field!("students" => "students", query().fields([ + field!("student_name" => "name") + ])), + ])) + .relationships([( + "students", + relationship("students", [("title", "class_title"), ("year", "year")]), + )]) + .into(); + + let expected_response = row_set() + .row([ + ("class_title", json!("MongoDB 101")), + ( + "students", + json!({ "rows": [ + { "student_name": "Alice" }, + { "student_name": "Bob" }, + ]}), + ), + ]) + .into_response(); let expected_pipeline = bson!([ { @@ -553,7 +403,7 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &students_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) @@ -561,74 +411,49 @@ mod tests { #[tokio::test] async fn makes_recursive_lookups_for_nested_relations() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "class_title": { "type": "column", "column": "title", "column_type": "string" }, - "students": { - "type": "relationship", - "relationship": "students", - "query": { - "fields": { - "student_name": { "type": "column", "column": "name", "column_type": "string" }, - "assignments": { - "type": "relationship", - "relationship": "assignments", - "query": { - 
"fields": { - "assignment_title": { "type": "column", "column": "title", "column_type": "string" }, - }, - }, - }, - }, - }, - "relationship": "students", - }, - }, - }, - "target": {"name": ["classes"], "type": "table"}, - "relationships": [ - { - "source_table": ["classes"], - "relationships": { - "students": { - "column_mapping": { "_id": "class_id" }, - "relationship_type": "array", - "target": {"name": ["students"], "type": "table"}, + let query_request = query_request() + .collection("classes") + .query(query().fields([ + field!("class_title" => "title"), + relation_field!("students" => "students", query().fields([ + field!("student_name" => "name"), + relation_field!("assignments" => "assignments", query().fields([ + field!("assignment_title" => "title") + ])) + ])), + ])) + .relationships([ + ("students", relationship("students", [("_id", "class_id")])), + ( + "assignments", + relationship("assignments", [("_id", "student_id")]), + ), + ]) + .into(); + + let expected_response = row_set() + .row([ + ("class_title", json!("MongoDB 101")), + ( + "students", + json!({ "rows": [ + { + "student_name": "Alice", + "assignments": { "rows": [ + { "assignment_title": "read chapter 2" }, + ]} }, - }, - }, - { - "source_table": ["students"], - "relationships": { - "assignments": { - "column_mapping": { "_id": "student_id" }, - "relationship_type": "array", - "target": {"name": ["assignments"], "type": "table"}, + { + "student_name": "Bob", + "assignments": { "rows": [ + { "assignment_title": "JSON Basics" }, + { "assignment_title": "read chapter 2" }, + ]} }, - }, - } - ], - }))?; - - let expected_response = vec![doc! 
{ - "class_title": "MongoDB 101", - "students": { "rows": [ - { - "student_name": "Alice", - "assignments": { "rows": [ - { "assignment_title": "read chapter 2" }, - ]} - }, - { - "student_name": "Bob", - "assignments": { "rows": [ - { "assignment_title": "JSON Basics" }, - { "assignment_title": "read chapter 2" }, - ]} - }, - ]}, - }]; + ]}), + ), + ]) + .into_response(); let expected_pipeline = bson!([ { @@ -703,7 +528,7 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; + let result = execute_query_request(db, &students_config(), query_request).await?; assert_eq!(expected_response, result); Ok(()) @@ -711,40 +536,26 @@ mod tests { #[tokio::test] async fn executes_aggregation_in_relation() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "students_aggregate": { - "type": "relationship", - "query": { - "aggregates": { - "aggregate_count": { "type": "star_count" }, - }, - }, - "relationship": "students", - }, - }, - }, - "table": ["classes"], - "table_relationships": [{ - "source_table": ["classes"], - "relationships": { - "students": { - "column_mapping": { "_id": "classId" }, - "relationship_type": "array", - "target_table": ["students"], - }, - }, - }], - }))?; - - let expected_response = vec![doc! 
{ - "students_aggregate": { - "aggregates": { - "aggregate_count": 2, - }, - }, - }]; + let query_request = query_request() + .collection("classes") + .query(query().fields([ + relation_field!("students_aggregate" => "students", query().aggregates([ + star_count_aggregate!("aggregate_count") + ])), + ])) + .relationships([("students", relationship("students", [("_id", "classId")]))]) + .into(); + + let expected_response = row_set() + .row([( + "students_aggregate", + json!({ + "aggregates": { + "aggregate_count": { "$numberInt": "2" } + } + }), + )]) + .into_response(); let expected_pipeline = bson!([ { @@ -773,13 +584,13 @@ mod tests { }, } ], - "as": "students_aggregate", + "as": "students", }, }, { "$replaceWith": { "students_aggregate": { "$first": { - "$getField": { "$literal": "students_aggregate" } + "$getField": { "$literal": "students" } } } }, }, @@ -797,76 +608,56 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; - assert_eq!(expected_response, result); + let result = execute_query_request(db, &students_config(), query_request).await?; + assert_eq!(result, expected_response); Ok(()) } #[tokio::test] async fn filters_by_field_of_related_collection() -> Result<(), anyhow::Error> { - let query_request: QueryRequest = from_value(json!({ - "query": { - "fields": { - "movie": { - "type": "relationship", - "query": { - "fields": { - "title": { "type": "column", "column": "title", "column_type": "string" }, - "year": { "type": "column", "column": "year", "column_type": "int" } - } - }, - "relationship": "movie" - }, - "name": { - "type": "column", - "column": "name", - "column_type": "string" - } - }, - "limit": 50, - "where": { - "type": "exists", - "in_table": { "type": "related", "relationship": "movie" }, - "where": { - "type": "binary_op", - "column": { "column_type": "string", "name": "title" }, - "operator": "equal", - "value": { "type": "scalar", "value": "The Land Beyond the Sunset", 
"value_type": "string" } - } - } - }, - "target": { - "type": "table", - "name": [ - "comments" - ] - }, - "relationships": [ - { - "relationships": { - "movie": { - "column_mapping": { - "movie_id": "_id" - }, - "relationship_type": "object", - "target": { "type": "table", "name": [ "movies" ] } - } - }, - "source_table": [ - "comments" - ] - } - ] - }))?; - - let expected_response = vec![doc! { - "name": "Mercedes Tyler", - "movie": { "rows": [{ - "title": "The Land Beyond the Sunset", - "year": 1912 - }] }, - }]; + let query_request = query_request() + .collection("comments") + .query( + query() + .fields([ + relation_field!("movie" => "movie", query().fields([ + field!("title"), + field!("year"), + ])), + field!("name"), + ]) + .limit(50) + .predicate(exists( + ndc_models::ExistsInCollection::Related { + relationship: "movie".into(), + arguments: Default::default(), + }, + binop( + "_eq", + target!("title"), + value!("The Land Beyond the Sunset"), + ), + )), + ) + .relationships([( + "movie", + relationship("movies", [("movie_id", "_id")]).object_type(), + )]) + .into(); + + let expected_response = row_set() + .row([ + ("name", json!("Mercedes Tyler")), + ( + "movie", + json!({ "rows": [{ + "title": "The Land Beyond the Sunset", + "year": 1912 + }]}), + ), + ]) + .into_response(); let expected_pipeline = bson!([ { @@ -887,8 +678,8 @@ mod tests { }, { "$match": { - "movie.title": { - "$eq": "The Land Beyond the Sunset" + "movie": { + "$elemMatch": { "title": { "$eq": "The Land Beyond the Sunset" } } } } }, @@ -921,144 +712,198 @@ mod tests { }]), ); - let result = execute_query_request(db, &Default::default(), query_request).await?; - assert_eq!(expected_response, result); + let result = execute_query_request(db, &mflix_config(), query_request).await?; + assert_eq!(result, expected_response); Ok(()) } - #[tokio::test] - async fn filters_by_field_nested_in_object_in_related_collection() -> Result<(), anyhow::Error> - { - let query_request: QueryRequest = 
from_value(json!({ - "query": { - "fields": { - "movie": { - "type": "relationship", - "query": { - "fields": { - "credits": { "type": "object", "column": "credits", "query": { - "fields": { - "director": { "type": "column", "column": "director", "column_type": "string" }, - } - } }, - } - }, - "relationship": "movie" - }, - "name": { - "type": "column", - "column": "name", - "column_type": "string" - } - }, - "limit": 50, - "where": { - "type": "exists", - "in_table": { "type": "related", "relationship": "movie" }, - "where": { - "type": "binary_op", - "column": { "column_type": "string", "name": ["credits", "director"] }, - "operator": "equal", - "value": { "type": "scalar", "value": "Martin Scorsese", "value_type": "string" } - } - } - }, - "target": { - "type": "table", - "name": [ - "comments" + // TODO: This test requires updated ndc_models that add `field_path` to + // [ndc::ComparisonTarget::Column] + // #[tokio::test] + // async fn filters_by_field_nested_in_object_in_related_collection() -> Result<(), anyhow::Error> + // { + // let query_request = query_request() + // .collection("comments") + // .query( + // query() + // .fields([relation_field!("movie" => "movie", query().fields([ + // field!("credits" => "credits", object!([ + // field!("director"), + // ])), + // ]))]) + // .limit(50) + // .predicate(exists( + // ndc_models::ExistsInCollection::Related { + // relationship: "movie".into(), + // arguments: Default::default(), + // }, + // binop( + // "_eq", + // target!("credits", field_path: ["director"]), + // value!("Martin Scorsese"), + // ), + // )), + // ) + // .relationships([("movie", relationship("movies", [("movie_id", "_id")]))]) + // .into(); + // + // let expected_response = row_set() + // .row([ + // ("name", "Beric Dondarrion"), + // ( + // "movie", + // json!({ "rows": [{ + // "credits": { + // "director": "Martin Scorsese", + // } + // }]}), + // ), + // ]) + // .into(); + // + // let expected_pipeline = bson!([ + // { + // "$lookup": { 
+ // "from": "movies", + // "localField": "movie_id", + // "foreignField": "_id", + // "pipeline": [ + // { + // "$replaceWith": { + // "credits": { + // "$cond": { + // "if": "$credits", + // "then": { "director": { "$ifNull": ["$credits.director", null] } }, + // "else": null, + // } + // }, + // } + // } + // ], + // "as": "movie" + // } + // }, + // { + // "$match": { + // "movie.credits.director": { + // "$eq": "Martin Scorsese" + // } + // } + // }, + // { + // "$limit": Bson::Int64(50), + // }, + // { + // "$replaceWith": { + // "name": { "$ifNull": ["$name", null] }, + // "movie": { + // "rows": { + // "$getField": { + // "$literal": "movie" + // } + // } + // }, + // } + // }, + // ]); + // + // let db = mock_collection_aggregate_response_for_pipeline( + // "comments", + // expected_pipeline, + // bson!([{ + // "name": "Beric Dondarrion", + // "movie": { "rows": [{ + // "credits": { + // "director": "Martin Scorsese" + // } + // }] }, + // }]), + // ); + // + // let result = execute_query_request(db, &mflix_config(), query_request).await?; + // assert_eq!(expected_response, result); + // + // Ok(()) + // } + + fn students_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: [ + collection("assignments"), + collection("classes"), + collection("students"), ] - }, - "relationships": [ - { - "relationships": { - "movie": { - "column_mapping": { - "movie_id": "_id" - }, - "relationship_type": "object", - "target": { "type": "table", "name": [ "movies" ] } - } - }, - "source_table": [ - "comments" - ] - } - ] - }))?; - - let expected_response = vec![doc! 
{ - "name": "Beric Dondarrion", - "movie": { "rows": [{ - "credits": { - "director": "Martin Scorsese", - } - }] }, - }]; - - let expected_pipeline = bson!([ - { - "$lookup": { - "from": "movies", - "localField": "movie_id", - "foreignField": "_id", - "pipeline": [ - { - "$replaceWith": { - "credits": { - "$cond": { - "if": "$credits", - "then": { "director": { "$ifNull": ["$credits.director", null] } }, - "else": null, - } - }, - } - } - ], - "as": "movie" - } - }, - { - "$match": { - "movie.credits.director": { - "$eq": "Martin Scorsese" - } - } - }, - { - "$limit": Bson::Int64(50), - }, - { - "$replaceWith": { - "name": { "$ifNull": ["$name", null] }, - "movie": { - "rows": { - "$getField": { - "$literal": "movie" - } - } - }, - } - }, - ]); - - let db = mock_collection_aggregate_response_for_pipeline( - "comments", - expected_pipeline, - bson!([{ - "name": "Beric Dondarrion", - "movie": { "rows": [{ - "credits": { - "director": "Martin Scorsese" - } - }] }, - }]), - ); - - let result = execute_query_request(db, &Default::default(), query_request).await?; - assert_eq!(expected_response, result); + .into(), + object_types: [ + ( + "assignments".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("student_id", named_type("ObjectId")), + ("title", named_type("String")), + ]), + ), + ( + "classes".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("title", named_type("String")), + ("year", named_type("Int")), + ]), + ), + ( + "students".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("classId", named_type("ObjectId")), + ("gpa", named_type("Double")), + ("name", named_type("String")), + ("year", named_type("Int")), + ]), + ), + ] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) + } - Ok(()) + fn mflix_config() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: 
[collection("comments"), collection("movies")].into(), + object_types: [ + ( + "comments".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("movie_id", named_type("ObjectId")), + ("name", named_type("String")), + ]), + ), + ( + "credits".into(), + object_type([("director", named_type("String"))]), + ), + ( + "movies".into(), + object_type([ + ("_id", named_type("ObjectId")), + ("credits", named_type("credits")), + ("title", named_type("String")), + ("year", named_type("Int")), + ]), + ), + ] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) } } diff --git a/crates/mongodb-agent-common/src/query/response.rs b/crates/mongodb-agent-common/src/query/response.rs new file mode 100644 index 00000000..3149b7b1 --- /dev/null +++ b/crates/mongodb-agent-common/src/query/response.rs @@ -0,0 +1,657 @@ +use std::collections::BTreeMap; + +use configuration::MongoScalarType; +use indexmap::IndexMap; +use itertools::Itertools; +use mongodb::bson::{self, Bson}; +use ndc_models::{QueryResponse, RowFieldValue, RowSet}; +use serde::Deserialize; +use thiserror::Error; +use tracing::instrument; + +use crate::{ + mongo_query_plan::{ + Aggregate, Field, NestedArray, NestedField, NestedObject, ObjectType, Query, QueryPlan, + Type, + }, + query::serialization::{bson_to_json, BsonToJsonError}, +}; + +use super::serialization::is_nullable; + +#[derive(Debug, Error)] +pub enum QueryResponseError { + #[error("expected aggregates to be an object at path {}", path.join("."))] + AggregatesNotObject { path: Vec }, + + #[error("{0}")] + BsonDeserialization(#[from] bson::de::Error), + + #[error("{0}")] + BsonToJson(#[from] BsonToJsonError), + + #[error("expected a single response document from MongoDB, but did not get one")] + ExpectedSingleDocument, + + #[error("a query field referenced a relationship, but no fields from the relationship were 
selected")] + NoFieldsSelected { path: Vec }, +} + +type Result = std::result::Result; + +// These structs describe possible shapes of data returned by MongoDB query plans + +#[derive(Debug, Deserialize)] +struct ResponseForVariableSetsRowsOnly { + row_sets: Vec>, +} + +#[derive(Debug, Deserialize)] +struct ResponseForVariableSetsAggregates { + row_sets: Vec, +} + +#[derive(Debug, Deserialize)] +struct BsonRowSet { + #[serde(default)] + aggregates: Bson, + #[serde(default)] + rows: Vec, +} + +#[instrument(name = "Serialize Query Response", skip_all, fields(internal.visibility = "user"))] +pub fn serialize_query_response( + query_plan: &QueryPlan, + response_documents: Vec, +) -> Result { + let collection_name = &query_plan.collection; + + // If the query request specified variable sets then we should have gotten a single document + // from MongoDB with fields for multiple sets of results - one for each set of variables. + let row_sets = if query_plan.has_variables() && query_plan.query.has_aggregates() { + let responses: ResponseForVariableSetsAggregates = + parse_single_document(response_documents)?; + responses + .row_sets + .into_iter() + .map(|row_set| { + serialize_row_set_with_aggregates(&[collection_name], &query_plan.query, row_set) + }) + .try_collect() + } else if query_plan.variables.is_some() { + let responses: ResponseForVariableSetsRowsOnly = parse_single_document(response_documents)?; + responses + .row_sets + .into_iter() + .map(|row_set| { + serialize_row_set_rows_only(&[collection_name], &query_plan.query, row_set) + }) + .try_collect() + } else if query_plan.query.has_aggregates() { + let row_set = parse_single_document(response_documents)?; + Ok(vec![serialize_row_set_with_aggregates( + &[], + &query_plan.query, + row_set, + )?]) + } else { + Ok(vec![serialize_row_set_rows_only( + &[], + &query_plan.query, + response_documents, + )?]) + }?; + let response = QueryResponse(row_sets); + tracing::debug!(query_response = 
%serde_json::to_string(&response).unwrap()); + Ok(response) +} + +// When there are no aggregates we expect a list of rows +fn serialize_row_set_rows_only( + path: &[&str], + query: &Query, + docs: Vec, +) -> Result { + let rows = query + .fields + .as_ref() + .map(|fields| serialize_rows(path, fields, docs)) + .transpose()?; + + Ok(RowSet { + aggregates: None, + rows, + }) +} + +// When there are aggregates we expect a single document with `rows` and `aggregates` +// fields +fn serialize_row_set_with_aggregates( + path: &[&str], + query: &Query, + row_set: BsonRowSet, +) -> Result { + let aggregates = query + .aggregates + .as_ref() + .map(|aggregates| serialize_aggregates(path, aggregates, row_set.aggregates)) + .transpose()?; + + let rows = query + .fields + .as_ref() + .map(|fields| serialize_rows(path, fields, row_set.rows)) + .transpose()?; + + Ok(RowSet { aggregates, rows }) +} + +fn serialize_aggregates( + path: &[&str], + _query_aggregates: &IndexMap, + value: Bson, +) -> Result> { + let aggregates_type = type_for_aggregates()?; + let json = bson_to_json(&aggregates_type, value)?; + + // The NDC type uses an IndexMap for aggregate values; we need to convert the map + // underlying the Value::Object value to an IndexMap + let aggregate_values = match json { + serde_json::Value::Object(obj) => obj.into_iter().collect(), + _ => Err(QueryResponseError::AggregatesNotObject { + path: path_to_owned(path), + })?, + }; + Ok(aggregate_values) +} + +fn serialize_rows( + path: &[&str], + query_fields: &IndexMap, + docs: Vec, +) -> Result>> { + let row_type = type_for_row(path, query_fields)?; + + docs.into_iter() + .map(|doc| { + let json = bson_to_json(&row_type, doc.into())?; + // The NDC types use an IndexMap for each row value; we need to convert the map + // underlying the Value::Object value to an IndexMap + let index_map = match json { + serde_json::Value::Object(obj) => obj + .into_iter() + .map(|(key, value)| (key, RowFieldValue(value))) + .collect(), + _ => 
unreachable!(), + }; + Ok(index_map) + }) + .try_collect() +} + +fn type_for_row_set( + path: &[&str], + aggregates: &Option>, + fields: &Option>, +) -> Result { + let mut type_fields = BTreeMap::new(); + + if aggregates.is_some() { + type_fields.insert("aggregates".to_owned(), type_for_aggregates()?); + } + + if let Some(query_fields) = fields { + let row_type = type_for_row(path, query_fields)?; + type_fields.insert("rows".to_owned(), Type::ArrayOf(Box::new(row_type))); + } + + Ok(Type::Object(ObjectType { + fields: type_fields, + name: None, + })) +} + +// TODO: infer response type for aggregates MDB-130 +fn type_for_aggregates() -> Result { + Ok(Type::Scalar(MongoScalarType::ExtendedJSON)) +} + +fn type_for_row(path: &[&str], query_fields: &IndexMap) -> Result { + let fields = query_fields + .iter() + .map(|(field_name, field_definition)| { + let field_type = type_for_field( + &append_to_path(path, [field_name.as_ref()]), + field_definition, + )?; + Ok((field_name.clone(), field_type)) + }) + .try_collect::<_, _, QueryResponseError>()?; + Ok(Type::Object(ObjectType { fields, name: None })) +} + +fn type_for_field(path: &[&str], field_definition: &Field) -> Result { + let field_type: Type = match field_definition { + Field::Column { + column_type, + fields: None, + .. + } => column_type.clone(), + Field::Column { + column_type, + fields: Some(nested_field), + .. + } => type_for_nested_field(path, column_type, nested_field)?, + Field::Relationship { + aggregates, fields, .. 
+ } => type_for_row_set(path, aggregates, fields)?, + }; + Ok(field_type) +} + +pub fn type_for_nested_field( + path: &[&str], + parent_type: &Type, + nested_field: &NestedField, +) -> Result { + let field_type = match nested_field { + ndc_query_plan::NestedField::Object(NestedObject { fields }) => { + let t = type_for_row(path, fields)?; + if is_nullable(parent_type) { + t.into_nullable() + } else { + t + } + } + ndc_query_plan::NestedField::Array(NestedArray { + fields: nested_field, + }) => { + let element_type = type_for_nested_field( + &append_to_path(path, ["[]"]), + element_type(parent_type), + nested_field, + )?; + let t = Type::ArrayOf(Box::new(element_type)); + if is_nullable(parent_type) { + t.into_nullable() + } else { + t + } + } + }; + Ok(field_type) +} + +/// Get type for elements within an array type. Be permissive if the given type is not an array. +fn element_type(probably_array_type: &Type) -> &Type { + match probably_array_type { + Type::Nullable(pt) => element_type(pt), + Type::ArrayOf(pt) => pt, + pt => pt, + } +} + +fn parse_single_document(documents: Vec) -> Result +where + T: for<'de> serde::Deserialize<'de>, +{ + let document = documents + .into_iter() + .next() + .ok_or(QueryResponseError::ExpectedSingleDocument)?; + let value = bson::from_document(document)?; + Ok(value) +} + +fn append_to_path<'a>(path: &[&'a str], elems: impl IntoIterator) -> Vec<&'a str> { + path.iter().copied().chain(elems).collect() +} + +fn path_to_owned(path: &[&str]) -> Vec { + path.iter().map(|x| (*x).to_owned()).collect() +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use configuration::{Configuration, MongoScalarType}; + use mongodb::bson::{self, Bson}; + use mongodb_support::BsonScalarType; + use ndc_models::{QueryRequest, QueryResponse, RowFieldValue, RowSet}; + use ndc_query_plan::plan_for_query_request; + use ndc_test_helpers::{ + array, collection, field, named_type, object, object_type, query, query_request, + relation_field, relationship, 
+ }; + use pretty_assertions::assert_eq; + use serde_json::json; + + use crate::{ + mongo_query_plan::{MongoConfiguration, ObjectType, Type}, + test_helpers::make_nested_schema, + }; + + use super::{serialize_query_response, type_for_row_set}; + + #[test] + fn serializes_response_with_nested_fields() -> anyhow::Result<()> { + let request = query_request() + .collection("authors") + .query(query().fields([field!("address" => "address", object!([ + field!("street"), + field!("geocode" => "geocode", object!([ + field!("longitude"), + ])), + ]))])) + .into(); + let query_plan = plan_for_query_request(&make_nested_schema(), request)?; + + let response_documents = vec![bson::doc! { + "address": { + "street": "137 Maple Dr", + "geocode": { + "longitude": 122.4194, + }, + }, + }]; + + let response = serialize_query_response(&query_plan, response_documents)?; + assert_eq!( + response, + QueryResponse(vec![RowSet { + aggregates: Default::default(), + rows: Some(vec![[( + "address".into(), + RowFieldValue(json!({ + "street": "137 Maple Dr", + "geocode": { + "longitude": 122.4194, + }, + })) + )] + .into()]), + }]) + ); + Ok(()) + } + + #[test] + fn serializes_response_with_nested_object_inside_array() -> anyhow::Result<()> { + let request = query_request() + .collection("authors") + .query(query().fields([field!("articles" => "articles", array!( + object!([ + field!("title"), + ]) + ))])) + .into(); + let query_plan = plan_for_query_request(&make_nested_schema(), request)?; + + let response_documents = vec![bson::doc! 
{ + "articles": [ + { "title": "Modeling MongoDB with relational model" }, + { "title": "NoSQL databases: MongoDB vs cassandra" }, + ], + }]; + + let response = serialize_query_response(&query_plan, response_documents)?; + assert_eq!( + response, + QueryResponse(vec![RowSet { + aggregates: Default::default(), + rows: Some(vec![[( + "articles".into(), + RowFieldValue(json!([ + { "title": "Modeling MongoDB with relational model" }, + { "title": "NoSQL databases: MongoDB vs cassandra" }, + ])) + )] + .into()]), + }]) + ); + Ok(()) + } + + #[test] + fn serializes_response_with_aliased_fields() -> anyhow::Result<()> { + let request = query_request() + .collection("authors") + .query(query().fields([ + field!("address1" => "address", object!([ + field!("line1" => "street"), + ])), + field!("address2" => "address", object!([ + field!("latlong" => "geocode", object!([ + field!("long" => "longitude"), + ])), + ])), + ])) + .into(); + let query_plan = plan_for_query_request(&make_nested_schema(), request)?; + + let response_documents = vec![bson::doc! 
{ + "address1": { + "line1": "137 Maple Dr", + }, + "address2": { + "latlong": { + "long": 122.4194, + }, + }, + }]; + + let response = serialize_query_response(&query_plan, response_documents)?; + assert_eq!( + response, + QueryResponse(vec![RowSet { + aggregates: Default::default(), + rows: Some(vec![[ + ( + "address1".into(), + RowFieldValue(json!({ + "line1": "137 Maple Dr", + })) + ), + ( + "address2".into(), + RowFieldValue(json!({ + "latlong": { + "long": 122.4194, + }, + })) + ) + ] + .into()]), + }]) + ); + Ok(()) + } + + #[test] + fn serializes_response_with_decimal_128_fields() -> anyhow::Result<()> { + let query_context = MongoConfiguration(Configuration { + collections: [collection("business")].into(), + object_types: [( + "business".into(), + object_type([ + ("price", named_type("Decimal")), + ("price_extjson", named_type("ExtendedJSON")), + ]), + )] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }); + + let request = query_request() + .collection("business") + .query(query().fields([field!("price"), field!("price_extjson")])) + .into(); + + let query_plan = plan_for_query_request(&query_context, request)?; + + let response_documents = vec![bson::doc! 
{ + "price": Bson::Decimal128(bson::Decimal128::from_str("127.6486654").unwrap()), + "price_extjson": Bson::Decimal128(bson::Decimal128::from_str("-4.9999999999").unwrap()), + }]; + + let response = serialize_query_response(&query_plan, response_documents)?; + assert_eq!( + response, + QueryResponse(vec![RowSet { + aggregates: Default::default(), + rows: Some(vec![[ + ("price".into(), RowFieldValue(json!("127.6486654"))), + ( + "price_extjson".into(), + RowFieldValue(json!({ + "$numberDecimal": "-4.9999999999" + })) + ), + ] + .into()]), + }]) + ); + Ok(()) + } + + #[test] + fn serializes_response_with_nested_extjson() -> anyhow::Result<()> { + let query_context = MongoConfiguration(Configuration { + collections: [collection("data")].into(), + object_types: [( + "data".into(), + object_type([("value", named_type("ExtendedJSON"))]), + )] + .into(), + functions: Default::default(), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }); + + let request = query_request() + .collection("data") + .query(query().fields([field!("value")])) + .into(); + + let query_plan = plan_for_query_request(&query_context, request)?; + + let response_documents = vec![bson::doc! 
{ + "value": { + "array": [ + { "number": Bson::Int32(3) }, + { "number": Bson::Decimal128(bson::Decimal128::from_str("127.6486654").unwrap()) }, + ], + "string": "hello", + "object": { + "foo": 1, + "bar": 2, + }, + }, + }]; + + let response = serialize_query_response(&query_plan, response_documents)?; + assert_eq!( + response, + QueryResponse(vec![RowSet { + aggregates: Default::default(), + rows: Some(vec![[( + "value".into(), + RowFieldValue(json!({ + "array": [ + { "number": { "$numberInt": "3" } }, + { "number": { "$numberDecimal": "127.6486654" } }, + ], + "string": "hello", + "object": { + "foo": { "$numberInt": "1" }, + "bar": { "$numberInt": "2" }, + }, + })) + )] + .into()]), + }]) + ); + Ok(()) + } + + #[test] + fn uses_field_path_to_guarantee_distinct_type_names() -> anyhow::Result<()> { + let collection_name = "appearances"; + let request: QueryRequest = query_request() + .collection(collection_name) + .relationships([("author", relationship("authors", [("authorId", "id")]))]) + .query( + query().fields([relation_field!("presenter" => "author", query().fields([ + field!("addr" => "address", object!([ + field!("street"), + field!("geocode" => "geocode", object!([ + field!("latitude"), + field!("long" => "longitude"), + ])) + ])), + field!("articles" => "articles", array!(object!([ + field!("article_title" => "title") + ]))), + ]))]), + ) + .into(); + let query_plan = plan_for_query_request(&make_nested_schema(), request)?; + let path = [collection_name]; + + let row_set_type = type_for_row_set( + &path, + &query_plan.query.aggregates, + &query_plan.query.fields, + )?; + + let expected = Type::Object(ObjectType { + name: None, + fields: [ + ("rows".into(), Type::ArrayOf(Box::new(Type::Object(ObjectType { + name: None, + fields: [ + ("presenter".into(), Type::Object(ObjectType { + name: None, + fields: [ + ("rows".into(), Type::ArrayOf(Box::new(Type::Object(ObjectType { + name: None, + fields: [ + ("addr".into(), Type::Object(ObjectType { + name: None, + 
fields: [ + ("geocode".into(), Type::Nullable(Box::new(Type::Object(ObjectType { + name: None, + fields: [ + ("latitude".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::Double))), + ("long".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::Double))), + ].into(), + })))), + ("street".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::String))), + ].into(), + })), + ("articles".into(), Type::ArrayOf(Box::new(Type::Object(ObjectType { + name: None, + fields: [ + ("article_title".into(), Type::Scalar(MongoScalarType::Bson(BsonScalarType::String))), + ].into(), + })))), + ].into(), + })))) + ].into(), + })) + ].into() + })))) + ].into(), + }); + + assert_eq!(row_set_type, expected); + Ok(()) + } +} diff --git a/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs b/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs index 2d4adbc9..8c5c8499 100644 --- a/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs +++ b/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs @@ -1,9 +1,4 @@ -use std::collections::BTreeMap; - -use configuration::{ - schema::{ObjectField, ObjectType, Type}, - WithNameRef, -}; +use configuration::MongoScalarType; use itertools::Itertools as _; use mongodb::bson::{self, Bson}; use mongodb_support::BsonScalarType; @@ -11,7 +6,9 @@ use serde_json::{to_value, Number, Value}; use thiserror::Error; use time::{format_description::well_known::Iso8601, OffsetDateTime}; -use super::json_formats; +use crate::mongo_query_plan::{ObjectType, Type}; + +use super::{is_nullable, json_formats}; #[derive(Debug, Error)] pub enum BsonToJsonError { @@ -21,7 +18,7 @@ pub enum BsonToJsonError { #[error("error converting 64-bit floating point number from BSON to JSON: {0}")] DoubleConversion(f64), - #[error("input object of type \"{0:?}\" is missing a field, \"{1}\"")] + #[error("input object of type {0:?} is missing a field, \"{1}\"")] MissingObjectField(Type, String), #[error("error 
converting value to JSON: {0}")] @@ -44,22 +41,17 @@ type Result = std::result::Result; /// disambiguate types on the BSON side. We don't want those tags because we communicate type /// information out of band. That is except for the `Type::ExtendedJSON` type where we do want to emit /// Extended JSON because we don't have out-of-band information in that case. -pub fn bson_to_json( - expected_type: &Type, - object_types: &BTreeMap, - value: Bson, -) -> Result { +pub fn bson_to_json(expected_type: &Type, value: Bson) -> Result { match expected_type { - Type::ExtendedJSON => Ok(value.into_canonical_extjson()), - Type::Scalar(scalar_type) => bson_scalar_to_json(*scalar_type, value), - Type::Object(object_type_name) => { - let object_type = object_types - .get(object_type_name) - .ok_or_else(|| BsonToJsonError::UnknownObjectType(object_type_name.to_owned()))?; - convert_object(object_type_name, object_type, object_types, value) + Type::Scalar(configuration::MongoScalarType::ExtendedJSON) => { + Ok(value.into_canonical_extjson()) + } + Type::Scalar(MongoScalarType::Bson(scalar_type)) => { + bson_scalar_to_json(*scalar_type, value) } - Type::ArrayOf(element_type) => convert_array(element_type, object_types, value), - Type::Nullable(t) => convert_nullable(t, object_types, value), + Type::Object(object_type) => convert_object(object_type, value), + Type::ArrayOf(element_type) => convert_array(element_type, value), + Type::Nullable(t) => convert_nullable(t, value), } } @@ -95,17 +87,13 @@ fn bson_scalar_to_json(expected_type: BsonScalarType, value: Bson) -> Result Ok(Value::String(oid.to_hex())), (BsonScalarType::DbPointer, v) => Ok(v.into_canonical_extjson()), (_, v) => Err(BsonToJsonError::TypeMismatch( - Type::Scalar(expected_type), + Type::Scalar(MongoScalarType::Bson(expected_type)), v, )), } } -fn convert_array( - element_type: &Type, - object_types: &BTreeMap, - value: Bson, -) -> Result { +fn convert_array(element_type: &Type, value: Bson) -> Result { let values = 
match value { Bson::Array(values) => Ok(values), _ => Err(BsonToJsonError::TypeMismatch( @@ -115,21 +103,16 @@ fn convert_array( }?; let json_array = values .into_iter() - .map(|value| bson_to_json(element_type, object_types, value)) + .map(|value| bson_to_json(element_type, value)) .try_collect()?; Ok(Value::Array(json_array)) } -fn convert_object( - object_type_name: &str, - object_type: &ObjectType, - object_types: &BTreeMap, - value: Bson, -) -> Result { +fn convert_object(object_type: &ObjectType, value: Bson) -> Result { let input_doc = match value { Bson::Document(fields) => Ok(fields), _ => Err(BsonToJsonError::TypeMismatch( - Type::Object(object_type_name.to_owned()), + Type::Object(object_type.to_owned()), value, )), }?; @@ -137,13 +120,13 @@ fn convert_object( .named_fields() .filter_map(|field| { let field_value_result = - get_object_field_value(object_type_name, field.clone(), &input_doc).transpose()?; + get_object_field_value(object_type, field, &input_doc).transpose()?; Some((field, field_value_result)) }) - .map(|(field, field_value_result)| { + .map(|((field_name, field_type), field_value_result)| { Ok(( - field.name.to_owned(), - bson_to_json(&field.value.r#type, object_types, field_value_result?)?, + field_name.to_owned(), + bson_to_json(field_type, field_value_result?)?, )) }) .try_collect::<_, _, BsonToJsonError>()?; @@ -154,30 +137,26 @@ fn convert_object( // missing, and the field is nullable. Returns `Err` if the value is missing and the field is *not* // nullable. 
fn get_object_field_value( - object_type_name: &str, - field: WithNameRef<'_, ObjectField>, + object_type: &ObjectType, + (field_name, field_type): (&str, &Type), doc: &bson::Document, ) -> Result> { - let value = doc.get(field.name); - if value.is_none() && field.value.r#type.is_nullable() { + let value = doc.get(field_name); + if value.is_none() && is_nullable(field_type) { return Ok(None); } Ok(Some(value.cloned().ok_or_else(|| { BsonToJsonError::MissingObjectField( - Type::Object(object_type_name.to_owned()), - field.name.to_owned(), + Type::Object(object_type.clone()), + field_name.to_owned(), ) })?)) } -fn convert_nullable( - underlying_type: &Type, - object_types: &BTreeMap, - value: Bson, -) -> Result { +fn convert_nullable(underlying_type: &Type, value: Bson) -> Result { match value { Bson::Null => Ok(Value::Null), - non_null_value => bson_to_json(underlying_type, object_types, non_null_value), + non_null_value => bson_to_json(underlying_type, non_null_value), } } @@ -218,7 +197,7 @@ fn convert_small_number(expected_type: BsonScalarType, value: Bson) -> Result Ok(Value::Number(n.into())), _ => Err(BsonToJsonError::TypeMismatch( - Type::Scalar(expected_type), + Type::Scalar(MongoScalarType::Bson(expected_type)), value, )), } @@ -237,8 +216,7 @@ mod tests { fn serializes_object_id_to_string() -> anyhow::Result<()> { let expected_string = "573a1390f29313caabcd446f"; let json = bson_to_json( - &Type::Scalar(BsonScalarType::ObjectId), - &Default::default(), + &Type::Scalar(MongoScalarType::Bson(BsonScalarType::ObjectId)), Bson::ObjectId(FromStr::from_str(expected_string)?), )?; assert_eq!(json, Value::String(expected_string.to_owned())); @@ -247,24 +225,18 @@ mod tests { #[test] fn serializes_document_with_missing_nullable_field() -> anyhow::Result<()> { - let expected_type = Type::Object("test_object".to_owned()); - let object_types = [( - "test_object".to_owned(), - ObjectType { - fields: [( - "field".to_owned(), - ObjectField { - r#type: 
Type::Nullable(Box::new(Type::Scalar(BsonScalarType::String))), - description: None, - }, - )] - .into(), - description: None, - }, - )] - .into(); + let expected_type = Type::Object(ObjectType { + name: Some("test_object".into()), + fields: [( + "field".to_owned(), + Type::Nullable(Box::new(Type::Scalar(MongoScalarType::Bson( + BsonScalarType::String, + )))), + )] + .into(), + }); let value = bson::doc! {}; - let actual = bson_to_json(&expected_type, &object_types, value.into())?; + let actual = bson_to_json(&expected_type, value.into())?; assert_eq!(actual, json!({})); Ok(()) } diff --git a/crates/mongodb-agent-common/src/query/serialization/helpers.rs b/crates/mongodb-agent-common/src/query/serialization/helpers.rs new file mode 100644 index 00000000..51deebd5 --- /dev/null +++ b/crates/mongodb-agent-common/src/query/serialization/helpers.rs @@ -0,0 +1,13 @@ +use configuration::MongoScalarType; +use mongodb_support::BsonScalarType; +use ndc_query_plan::Type; + +pub fn is_nullable(t: &Type) -> bool { + matches!( + t, + Type::Nullable(_) + | Type::Scalar( + MongoScalarType::Bson(BsonScalarType::Null) | MongoScalarType::ExtendedJSON + ) + ) +} diff --git a/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs b/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs index 808b2f70..ac6dad86 100644 --- a/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs +++ b/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs @@ -1,9 +1,6 @@ use std::{collections::BTreeMap, num::ParseIntError, str::FromStr}; -use configuration::{ - schema::{ObjectField, ObjectType, Type}, - WithNameRef, -}; +use configuration::MongoScalarType; use itertools::Itertools as _; use mongodb::bson::{self, Bson, Decimal128}; use mongodb_support::BsonScalarType; @@ -12,7 +9,9 @@ use serde_json::Value; use thiserror::Error; use time::{format_description::well_known::Iso8601, OffsetDateTime}; -use super::json_formats; +use 
crate::mongo_query_plan::{ObjectType, Type}; + +use super::{helpers::is_nullable, json_formats}; #[derive(Debug, Error)] pub enum JsonToBsonError { @@ -55,24 +54,15 @@ type Result = std::result::Result; /// The BSON library already has a `Deserialize` impl that can convert from JSON. But that /// implementation cannot take advantage of the type information that we have available. Instead it /// uses Extended JSON which uses tags in JSON data to distinguish BSON types. -pub fn json_to_bson( - expected_type: &Type, - object_types: &BTreeMap, - value: Value, -) -> Result { +pub fn json_to_bson(expected_type: &Type, value: Value) -> Result { match expected_type { - Type::ExtendedJSON => { + Type::Scalar(MongoScalarType::ExtendedJSON) => { serde_json::from_value::(value).map_err(JsonToBsonError::SerdeError) } - Type::Scalar(t) => json_to_bson_scalar(*t, value), - Type::Object(object_type_name) => { - let object_type = object_types - .get(object_type_name) - .ok_or_else(|| JsonToBsonError::UnknownObjectType(object_type_name.to_owned()))?; - convert_object(object_type_name, object_type, object_types, value) - } - Type::ArrayOf(element_type) => convert_array(element_type, object_types, value), - Type::Nullable(t) => convert_nullable(t, object_types, value), + Type::Scalar(MongoScalarType::Bson(t)) => json_to_bson_scalar(*t, value), + Type::Object(object_type) => convert_object(object_type, value), + Type::ArrayOf(element_type) => convert_array(element_type, value), + Type::Nullable(t) => convert_nullable(t, value), } } @@ -85,7 +75,7 @@ pub fn json_to_bson_scalar(expected_type: BsonScalarType, value: Value) -> Resul BsonScalarType::Decimal => Bson::Decimal128( Decimal128::from_str(&from_string(expected_type, value.clone())?).map_err(|err| { JsonToBsonError::ConversionErrorWithContext( - Type::Scalar(expected_type), + Type::Scalar(MongoScalarType::Bson(expected_type)), value, err.into(), ) @@ -126,38 +116,28 @@ pub fn json_to_bson_scalar(expected_type: BsonScalarType, 
value: Value) -> Resul Ok(result) } -fn convert_array( - element_type: &Type, - object_types: &BTreeMap, - value: Value, -) -> Result { +fn convert_array(element_type: &Type, value: Value) -> Result { let input_elements: Vec = serde_json::from_value(value)?; let bson_array = input_elements .into_iter() - .map(|v| json_to_bson(element_type, object_types, v)) + .map(|v| json_to_bson(element_type, v)) .try_collect()?; Ok(Bson::Array(bson_array)) } -fn convert_object( - object_type_name: &str, - object_type: &ObjectType, - object_types: &BTreeMap, - value: Value, -) -> Result { +fn convert_object(object_type: &ObjectType, value: Value) -> Result { let input_fields: BTreeMap = serde_json::from_value(value)?; let bson_doc: bson::Document = object_type .named_fields() - .filter_map(|field| { + .filter_map(|(name, field_type)| { let field_value_result = - get_object_field_value(object_type_name, field.clone(), &input_fields) - .transpose()?; - Some((field, field_value_result)) + get_object_field_value(object_type, name, field_type, &input_fields).transpose()?; + Some((name, field_type, field_value_result)) }) - .map(|(field, field_value_result)| { + .map(|(name, field_type, field_value_result)| { Ok(( - field.name.to_owned(), - json_to_bson(&field.value.r#type, object_types, field_value_result?)?, + name.to_owned(), + json_to_bson(field_type, field_value_result?)?, )) }) .try_collect::<_, _, JsonToBsonError>()?; @@ -168,37 +148,34 @@ fn convert_object( // missing, and the field is nullable. Returns `Err` if the value is missing and the field is *not* // nullable. 
fn get_object_field_value( - object_type_name: &str, - field: WithNameRef<'_, ObjectField>, + object_type: &ObjectType, + field_name: &str, + field_type: &Type, object: &BTreeMap, ) -> Result> { - let value = object.get(field.name); - if value.is_none() && field.value.r#type.is_nullable() { + let value = object.get(field_name); + if value.is_none() && is_nullable(field_type) { return Ok(None); } Ok(Some(value.cloned().ok_or_else(|| { JsonToBsonError::MissingObjectField( - Type::Object(object_type_name.to_owned()), - field.name.to_owned(), + Type::Object(object_type.clone()), + field_name.to_owned(), ) })?)) } -fn convert_nullable( - underlying_type: &Type, - object_types: &BTreeMap, - value: Value, -) -> Result { +fn convert_nullable(underlying_type: &Type, value: Value) -> Result { match value { Value::Null => Ok(Bson::Null), - non_null_value => json_to_bson(underlying_type, object_types, non_null_value), + non_null_value => json_to_bson(underlying_type, non_null_value), } } fn convert_date(value: &str) -> Result { let date = OffsetDateTime::parse(value, &Iso8601::DEFAULT).map_err(|err| { JsonToBsonError::ConversionErrorWithContext( - Type::Scalar(BsonScalarType::Date), + Type::Scalar(MongoScalarType::Bson(BsonScalarType::Date)), Value::String(value.to_owned()), err.into(), ) @@ -220,7 +197,11 @@ where T: DeserializeOwned, { serde_json::from_value::(value.clone()).map_err(|err| { - JsonToBsonError::ConversionErrorWithContext(Type::Scalar(expected_type), value, err.into()) + JsonToBsonError::ConversionErrorWithContext( + Type::Scalar(MongoScalarType::Bson(expected_type)), + value, + err.into(), + ) }) } @@ -228,7 +209,7 @@ fn from_string(expected_type: BsonScalarType, value: Value) -> Result { match value { Value::String(s) => Ok(s), _ => Err(JsonToBsonError::IncompatibleBackingType { - expected_type: Type::Scalar(expected_type), + expected_type: Type::Scalar(MongoScalarType::Bson(expected_type)), expected_backing_type: "String", value, }), @@ -237,52 +218,53 @@ fn 
from_string(expected_type: BsonScalarType, value: Value) -> Result { fn incompatible_scalar_type(expected_type: BsonScalarType, value: Value) -> Result { Err(JsonToBsonError::IncompatibleType( - Type::Scalar(expected_type), + Type::Scalar(MongoScalarType::Bson(expected_type)), value, )) } #[cfg(test)] mod tests { - use std::{collections::BTreeMap, str::FromStr}; + use std::str::FromStr; - use configuration::schema::{ObjectField, ObjectType, Type}; + use configuration::MongoScalarType; use mongodb::bson::{self, bson, datetime::DateTimeBuilder, Bson}; use mongodb_support::BsonScalarType; use pretty_assertions::assert_eq; use serde_json::json; + use crate::mongo_query_plan::{ObjectType, Type}; + use super::json_to_bson; #[test] #[allow(clippy::approx_constant)] fn deserializes_specialized_scalar_types() -> anyhow::Result<()> { - let object_type_name = "scalar_test".to_owned(); let object_type = ObjectType { - fields: BTreeMap::from([ - ObjectField::new("double", Type::Scalar(BsonScalarType::Double)), - ObjectField::new("int", Type::Scalar(BsonScalarType::Int)), - ObjectField::new("long", Type::Scalar(BsonScalarType::Long)), - ObjectField::new("decimal", Type::Scalar(BsonScalarType::Decimal)), - ObjectField::new("string", Type::Scalar(BsonScalarType::String)), - ObjectField::new("date", Type::Scalar(BsonScalarType::Date)), - ObjectField::new("timestamp", Type::Scalar(BsonScalarType::Timestamp)), - ObjectField::new("binData", Type::Scalar(BsonScalarType::BinData)), - ObjectField::new("objectId", Type::Scalar(BsonScalarType::ObjectId)), - ObjectField::new("bool", Type::Scalar(BsonScalarType::Bool)), - ObjectField::new("null", Type::Scalar(BsonScalarType::Null)), - ObjectField::new("undefined", Type::Scalar(BsonScalarType::Undefined)), - ObjectField::new("regex", Type::Scalar(BsonScalarType::Regex)), - ObjectField::new("javascript", Type::Scalar(BsonScalarType::Javascript)), - ObjectField::new( - "javascriptWithScope", - Type::Scalar(BsonScalarType::JavascriptWithScope), 
- ), - ObjectField::new("minKey", Type::Scalar(BsonScalarType::MinKey)), - ObjectField::new("maxKey", Type::Scalar(BsonScalarType::MaxKey)), - ObjectField::new("symbol", Type::Scalar(BsonScalarType::Symbol)), - ]), - description: Default::default(), + name: Some("scalar_test".to_owned()), + fields: [ + ("double", BsonScalarType::Double), + ("int", BsonScalarType::Int), + ("long", BsonScalarType::Long), + ("decimal", BsonScalarType::Decimal), + ("string", BsonScalarType::String), + ("date", BsonScalarType::Date), + ("timestamp", BsonScalarType::Timestamp), + ("binData", BsonScalarType::BinData), + ("objectId", BsonScalarType::ObjectId), + ("bool", BsonScalarType::Bool), + ("null", BsonScalarType::Null), + ("undefined", BsonScalarType::Undefined), + ("regex", BsonScalarType::Regex), + ("javascript", BsonScalarType::Javascript), + ("javascriptWithScope", BsonScalarType::JavascriptWithScope), + ("minKey", BsonScalarType::MinKey), + ("maxKey", BsonScalarType::MaxKey), + ("symbol", BsonScalarType::Symbol), + ] + .into_iter() + .map(|(name, t)| (name.to_owned(), Type::Scalar(MongoScalarType::Bson(t)))) + .collect(), }; let input = json!({ @@ -339,13 +321,7 @@ mod tests { "symbol": Bson::Symbol("a_symbol".to_owned()), }; - let actual = json_to_bson( - &Type::Object(object_type_name.clone()), - &[(object_type_name.clone(), object_type)] - .into_iter() - .collect(), - input, - )?; + let actual = json_to_bson(&Type::Object(object_type), input)?; assert_eq!(actual, expected.into()); Ok(()) } @@ -363,8 +339,9 @@ mod tests { Bson::ObjectId(FromStr::from_str("fae1840a2b85872385c67de5")?), ]); let actual = json_to_bson( - &Type::ArrayOf(Box::new(Type::Scalar(BsonScalarType::ObjectId))), - &Default::default(), + &Type::ArrayOf(Box::new(Type::Scalar(MongoScalarType::Bson( + BsonScalarType::ObjectId, + )))), input, )?; assert_eq!(actual, expected); @@ -381,9 +358,8 @@ mod tests { ]); let actual = json_to_bson( &Type::ArrayOf(Box::new(Type::Nullable(Box::new(Type::Scalar( - 
BsonScalarType::ObjectId, + MongoScalarType::Bson(BsonScalarType::ObjectId), ))))), - &Default::default(), input, )?; assert_eq!(actual, expected); @@ -392,24 +368,18 @@ mod tests { #[test] fn deserializes_object_with_missing_nullable_field() -> anyhow::Result<()> { - let expected_type = Type::Object("test_object".to_owned()); - let object_types = [( - "test_object".to_owned(), - ObjectType { - fields: [( - "field".to_owned(), - ObjectField { - r#type: Type::Nullable(Box::new(Type::Scalar(BsonScalarType::String))), - description: None, - }, - )] - .into(), - description: None, - }, - )] - .into(); + let expected_type = Type::Object(ObjectType { + name: Some("test_object".to_owned()), + fields: [( + "field".to_owned(), + Type::Nullable(Box::new(Type::Scalar(MongoScalarType::Bson( + BsonScalarType::String, + )))), + )] + .into(), + }); let value = json!({}); - let actual = json_to_bson(&expected_type, &object_types, value)?; + let actual = json_to_bson(&expected_type, value)?; assert_eq!(actual, bson!({})); Ok(()) } diff --git a/crates/mongodb-agent-common/src/query/serialization/mod.rs b/crates/mongodb-agent-common/src/query/serialization/mod.rs index be3becd0..ab82bee2 100644 --- a/crates/mongodb-agent-common/src/query/serialization/mod.rs +++ b/crates/mongodb-agent-common/src/query/serialization/mod.rs @@ -1,9 +1,11 @@ mod bson_to_json; +mod helpers; mod json_formats; mod json_to_bson; #[cfg(test)] mod tests; -pub use self::bson_to_json::{bson_to_json, BsonToJsonError}; -pub use self::json_to_bson::{json_to_bson, json_to_bson_scalar, JsonToBsonError}; +pub use bson_to_json::{bson_to_json, BsonToJsonError}; +pub use helpers::is_nullable; +pub use json_to_bson::{json_to_bson, json_to_bson_scalar, JsonToBsonError}; diff --git a/crates/mongodb-agent-common/src/query/serialization/tests.rs b/crates/mongodb-agent-common/src/query/serialization/tests.rs index 79ace254..75395f41 100644 --- a/crates/mongodb-agent-common/src/query/serialization/tests.rs +++ 
b/crates/mongodb-agent-common/src/query/serialization/tests.rs @@ -1,19 +1,26 @@ -use configuration::schema::Type; +use configuration::MongoScalarType; use mongodb::bson::Bson; use mongodb_cli_plugin::type_from_bson; use mongodb_support::BsonScalarType; +use ndc_query_plan::{self as plan, inline_object_types}; +use plan::QueryContext; use proptest::prelude::*; use test_helpers::arb_bson::{arb_bson, arb_datetime}; +use crate::mongo_query_plan::MongoConfiguration; + use super::{bson_to_json, json_to_bson}; proptest! { #[test] fn converts_bson_to_json_and_back(bson in arb_bson()) { - let (object_types, inferred_type) = type_from_bson("test_object", &bson, false); + let (schema_object_types, inferred_schema_type) = type_from_bson("test_object", &bson, false); + let object_types = schema_object_types.into_iter().map(|(name, t)| (name, t.into())).collect(); + let inferred_type = inline_object_types(&object_types, &inferred_schema_type.into(), MongoConfiguration::lookup_scalar_type)?; let error_context = |msg: &str, source: String| TestCaseError::fail(format!("{msg}: {source}\ninferred type: {inferred_type:?}\nobject types: {object_types:?}")); - let json = bson_to_json(&inferred_type, &object_types, bson.clone()).map_err(|e| error_context("error converting bson to json", e.to_string()))?; - let actual = json_to_bson(&inferred_type, &object_types, json.clone()).map_err(|e| error_context("error converting json to bson", e.to_string()))?; + + let json = bson_to_json(&inferred_type, bson.clone()).map_err(|e| error_context("error converting bson to json", e.to_string()))?; + let actual = json_to_bson(&inferred_type, json.clone()).map_err(|e| error_context("error converting json to bson", e.to_string()))?; prop_assert_eq!(actual, bson, "\ninferred type: {:?}\nobject types: {:?}\njson_representation: {}", inferred_type, @@ -26,10 +33,10 @@ proptest! { proptest! 
{ #[test] fn converts_datetime_from_bson_to_json_and_back(d in arb_datetime()) { - let t = Type::Scalar(BsonScalarType::Date); + let t = plan::Type::Scalar(MongoScalarType::Bson(BsonScalarType::Date)); let bson = Bson::DateTime(d); - let json = bson_to_json(&t, &Default::default(), bson.clone())?; - let actual = json_to_bson(&t, &Default::default(), json.clone())?; + let json = bson_to_json(&t, bson.clone())?; + let actual = json_to_bson(&t, json.clone())?; prop_assert_eq!(actual, bson, "json representation: {}", json) } } diff --git a/crates/mongodb-agent-common/src/scalar_types_capabilities.rs b/crates/mongodb-agent-common/src/scalar_types_capabilities.rs index ea4bba6e..eaf41183 100644 --- a/crates/mongodb-agent-common/src/scalar_types_capabilities.rs +++ b/crates/mongodb-agent-common/src/scalar_types_capabilities.rs @@ -1,24 +1,109 @@ -use std::collections::HashMap; +use std::collections::BTreeMap; -use dc_api_types::ScalarTypeCapabilities; -use enum_iterator::all; use itertools::Either; +use lazy_static::lazy_static; use mongodb_support::BsonScalarType; +use ndc_models::{ + AggregateFunctionDefinition, ComparisonOperatorDefinition, ScalarType, Type, TypeRepresentation, +}; use crate::aggregation_function::{AggregationFunction, AggregationFunction as A}; use crate::comparison_function::{ComparisonFunction, ComparisonFunction as C}; use BsonScalarType as S; -pub fn scalar_types_capabilities() -> HashMap { - let mut map = all::() - .map(|t| (t.graphql_name(), capabilities(t))) - .collect::>(); - map.insert( +lazy_static! 
{ + pub static ref SCALAR_TYPES: BTreeMap = scalar_types(); +} + +pub fn scalar_types() -> BTreeMap { + enum_iterator::all::() + .map(make_scalar_type) + .chain([extended_json_scalar_type()]) + .collect::>() +} + +fn extended_json_scalar_type() -> (String, ScalarType) { + ( mongodb_support::EXTENDED_JSON_TYPE_NAME.to_owned(), - ScalarTypeCapabilities::new(), - ); - map + ScalarType { + representation: Some(TypeRepresentation::JSON), + aggregate_functions: BTreeMap::new(), + comparison_operators: BTreeMap::new(), + }, + ) +} + +fn make_scalar_type(bson_scalar_type: BsonScalarType) -> (String, ScalarType) { + let scalar_type_name = bson_scalar_type.graphql_name(); + let scalar_type = ScalarType { + representation: bson_scalar_type_representation(bson_scalar_type), + aggregate_functions: bson_aggregation_functions(bson_scalar_type), + comparison_operators: bson_comparison_operators(bson_scalar_type), + }; + (scalar_type_name.to_owned(), scalar_type) +} + +fn bson_scalar_type_representation(bson_scalar_type: BsonScalarType) -> Option { + match bson_scalar_type { + BsonScalarType::Double => Some(TypeRepresentation::Float64), + BsonScalarType::Decimal => Some(TypeRepresentation::BigDecimal), // Not quite.... 
Mongo Decimal is 128-bit, BigDecimal is unlimited + BsonScalarType::Int => Some(TypeRepresentation::Int32), + BsonScalarType::Long => Some(TypeRepresentation::Int64), + BsonScalarType::String => Some(TypeRepresentation::String), + BsonScalarType::Date => Some(TypeRepresentation::Timestamp), // Mongo Date is milliseconds since unix epoch + BsonScalarType::Timestamp => None, // Internal Mongo timestamp type + BsonScalarType::BinData => None, + BsonScalarType::ObjectId => Some(TypeRepresentation::String), // Mongo ObjectId is usually expressed as a 24 char hex string (12 byte number) + BsonScalarType::Bool => Some(TypeRepresentation::Boolean), + BsonScalarType::Null => None, + BsonScalarType::Regex => None, + BsonScalarType::Javascript => None, + BsonScalarType::JavascriptWithScope => None, + BsonScalarType::MinKey => None, + BsonScalarType::MaxKey => None, + BsonScalarType::Undefined => None, + BsonScalarType::DbPointer => None, + BsonScalarType::Symbol => None, + } +} + +fn bson_comparison_operators( + bson_scalar_type: BsonScalarType, +) -> BTreeMap { + comparison_operators(bson_scalar_type) + .map(|(comparison_fn, arg_type)| { + let fn_name = comparison_fn.graphql_name().to_owned(); + match comparison_fn { + ComparisonFunction::Equal => (fn_name, ComparisonOperatorDefinition::Equal), + _ => ( + fn_name, + ComparisonOperatorDefinition::Custom { + argument_type: bson_to_named_type(arg_type), + }, + ), + } + }) + .collect() +} + +fn bson_aggregation_functions( + bson_scalar_type: BsonScalarType, +) -> BTreeMap { + aggregate_functions(bson_scalar_type) + .map(|(fn_name, result_type)| { + let aggregation_definition = AggregateFunctionDefinition { + result_type: bson_to_named_type(result_type), + }; + (fn_name.graphql_name().to_owned(), aggregation_definition) + }) + .collect() +} + +fn bson_to_named_type(bson_scalar_type: BsonScalarType) -> Type { + Type::Named { + name: bson_scalar_type.graphql_name().to_owned(), + } } pub fn aggregate_functions( @@ -64,25 +149,6 @@ 
pub fn comparison_operators( }) } -fn capabilities(scalar_type: BsonScalarType) -> ScalarTypeCapabilities { - let aggregations: HashMap = aggregate_functions(scalar_type) - .map(|(a, t)| (a.graphql_name().to_owned(), t.graphql_name())) - .collect(); - let comparisons: HashMap = comparison_operators(scalar_type) - .map(|(c, t)| (c.graphql_name().to_owned(), t.graphql_name())) - .collect(); - ScalarTypeCapabilities { - graphql_type: scalar_type.graphql_type(), - aggregate_functions: Some(aggregations), - comparison_operators: if comparisons.is_empty() { - None - } else { - Some(comparisons) - }, - update_column_operators: None, - } -} - /// If `condition` is true returns an iterator with the same items as the given `iter` input. /// Otherwise returns an empty iterator. fn iter_if(condition: bool, iter: impl Iterator) -> impl Iterator { diff --git a/crates/mongodb-agent-common/src/test_helpers.rs b/crates/mongodb-agent-common/src/test_helpers.rs new file mode 100644 index 00000000..bc566123 --- /dev/null +++ b/crates/mongodb-agent-common/src/test_helpers.rs @@ -0,0 +1,85 @@ +use std::collections::BTreeMap; + +use configuration::{schema, Configuration}; +use mongodb_support::BsonScalarType; +use ndc_models::CollectionInfo; +use ndc_test_helpers::{collection, make_primary_key_uniqueness_constraint, object_type}; + +use crate::mongo_query_plan::MongoConfiguration; + +pub fn make_nested_schema() -> MongoConfiguration { + MongoConfiguration(Configuration { + collections: BTreeMap::from([ + ( + "authors".into(), + CollectionInfo { + name: "authors".into(), + description: None, + collection_type: "Author".into(), + arguments: Default::default(), + uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), + foreign_keys: Default::default(), + }, + ), + collection("appearances"), // new helper gives more concise syntax + ]), + functions: Default::default(), + object_types: BTreeMap::from([ + ( + "Author".to_owned(), + object_type([ + ("name", 
schema::Type::Scalar(BsonScalarType::String)), + ("address", schema::Type::Object("Address".into())), + ( + "articles", + schema::Type::ArrayOf(Box::new(schema::Type::Object("Article".into()))), + ), + ( + "array_of_arrays", + schema::Type::ArrayOf(Box::new(schema::Type::ArrayOf(Box::new( + schema::Type::Object("Article".into()), + )))), + ), + ]), + ), + ( + "Address".into(), + object_type([ + ("country", schema::Type::Scalar(BsonScalarType::String)), + ("street", schema::Type::Scalar(BsonScalarType::String)), + ( + "apartment", + schema::Type::Nullable(Box::new(schema::Type::Scalar( + BsonScalarType::String, + ))), + ), + ( + "geocode", + schema::Type::Nullable(Box::new(schema::Type::Object( + "Geocode".to_owned(), + ))), + ), + ]), + ), + ( + "Article".into(), + object_type([("title", schema::Type::Scalar(BsonScalarType::String))]), + ), + ( + "Geocode".into(), + object_type([ + ("latitude", schema::Type::Scalar(BsonScalarType::Double)), + ("longitude", schema::Type::Scalar(BsonScalarType::Double)), + ]), + ), + ( + "appearances".to_owned(), + object_type([("authorId", schema::Type::Scalar(BsonScalarType::ObjectId))]), + ), + ]), + procedures: Default::default(), + native_mutations: Default::default(), + native_queries: Default::default(), + options: Default::default(), + }) +} diff --git a/crates/mongodb-connector/Cargo.toml b/crates/mongodb-connector/Cargo.toml index a8b8fcf5..c817579c 100644 --- a/crates/mongodb-connector/Cargo.toml +++ b/crates/mongodb-connector/Cargo.toml @@ -4,20 +4,19 @@ version = "0.1.0" edition = "2021" [dependencies] +configuration = { path = "../configuration" } +mongodb-agent-common = { path = "../mongodb-agent-common" } +mongodb-support = { path = "../mongodb-support" } +ndc-query-plan = { path = "../ndc-query-plan" } + anyhow = "1" async-trait = "^0.1" -configuration = { path = "../configuration" } -dc-api = { path = "../dc-api" } -dc-api-types = { path = "../dc-api-types" } enum-iterator = "^2.0.0" futures = "^0.3" http = "^0.2" 
-indexmap = { version = "2.1.0", features = ["serde"] } +indexmap = { workspace = true } itertools = { workspace = true } -lazy_static = "^1.4.0" mongodb = { workspace = true } -mongodb-agent-common = { path = "../mongodb-agent-common" } -mongodb-support = { path = "../mongodb-support" } ndc-sdk = { workspace = true } prometheus = "*" # share version from ndc-sdk serde = { version = "1.0", features = ["derive"] } @@ -27,6 +26,5 @@ tokio = { version = "1.28.1", features = ["full"] } tracing = "0.1" [dev-dependencies] -dc-api-test-helpers = { path = "../dc-api-test-helpers" } ndc-test-helpers = { path = "../ndc-test-helpers" } pretty_assertions = "1" diff --git a/crates/mongodb-connector/src/api_type_conversions/helpers.rs b/crates/mongodb-connector/src/api_type_conversions/helpers.rs deleted file mode 100644 index ef500a63..00000000 --- a/crates/mongodb-connector/src/api_type_conversions/helpers.rs +++ /dev/null @@ -1,14 +0,0 @@ -use std::collections::BTreeMap; - -use ndc_sdk::models::{self as v3}; - -use super::ConversionError; - -pub fn lookup_relationship<'a>( - relationships: &'a BTreeMap, - relationship: &str, -) -> Result<&'a v3::Relationship, ConversionError> { - relationships - .get(relationship) - .ok_or_else(|| ConversionError::UnspecifiedRelation(relationship.to_owned())) -} diff --git a/crates/mongodb-connector/src/api_type_conversions/mod.rs b/crates/mongodb-connector/src/api_type_conversions/mod.rs deleted file mode 100644 index 87386b60..00000000 --- a/crates/mongodb-connector/src/api_type_conversions/mod.rs +++ /dev/null @@ -1,12 +0,0 @@ -mod conversion_error; -mod helpers; -mod query_request; -mod query_response; -mod query_traversal; - -#[allow(unused_imports)] -pub use self::{ - conversion_error::ConversionError, - query_request::{v3_to_v2_query_request, QueryContext}, - query_response::v2_to_v3_explain_response, -}; diff --git a/crates/mongodb-connector/src/api_type_conversions/query_request.rs 
b/crates/mongodb-connector/src/api_type_conversions/query_request.rs deleted file mode 100644 index 69acff43..00000000 --- a/crates/mongodb-connector/src/api_type_conversions/query_request.rs +++ /dev/null @@ -1,1264 +0,0 @@ -use std::{ - borrow::Cow, - collections::{BTreeMap, HashMap}, -}; - -use configuration::{schema, WithNameRef}; -use dc_api_types::{self as v2, ColumnSelector, Target}; -use indexmap::IndexMap; -use itertools::Itertools as _; -use ndc_sdk::models::{self as v3}; - -use super::{ - helpers::lookup_relationship, - query_traversal::{query_traversal, Node, TraversalStep}, - ConversionError, -}; - -#[derive(Clone, Debug)] -pub struct QueryContext<'a> { - pub collections: Cow<'a, BTreeMap>, - pub functions: Cow<'a, BTreeMap>, - pub object_types: Cow<'a, BTreeMap>, - pub scalar_types: Cow<'a, BTreeMap>, -} - -impl QueryContext<'_> { - pub fn find_collection( - &self, - collection_name: &str, - ) -> Result<&v3::CollectionInfo, ConversionError> { - if let Some(collection) = self.collections.get(collection_name) { - return Ok(collection); - } - if let Some((_, function)) = self.functions.get(collection_name) { - return Ok(function); - } - - Err(ConversionError::UnknownCollection( - collection_name.to_string(), - )) - } - - pub fn find_collection_object_type( - &self, - collection_name: &str, - ) -> Result, ConversionError> { - let collection = self.find_collection(collection_name)?; - self.find_object_type(&collection.collection_type) - } - - pub fn find_object_type<'a>( - &'a self, - object_type_name: &'a str, - ) -> Result, ConversionError> { - let object_type = self - .object_types - .get(object_type_name) - .ok_or_else(|| ConversionError::UnknownObjectType(object_type_name.to_string()))?; - - Ok(WithNameRef { - name: object_type_name, - value: object_type, - }) - } - - fn find_scalar_type(&self, scalar_type_name: &str) -> Result<&v3::ScalarType, ConversionError> { - self.scalar_types - .get(scalar_type_name) - .ok_or_else(|| 
ConversionError::UnknownScalarType(scalar_type_name.to_owned())) - } - - fn find_aggregation_function_definition( - &self, - scalar_type_name: &str, - function: &str, - ) -> Result<&v3::AggregateFunctionDefinition, ConversionError> { - let scalar_type = self.find_scalar_type(scalar_type_name)?; - scalar_type - .aggregate_functions - .get(function) - .ok_or_else(|| ConversionError::UnknownAggregateFunction { - scalar_type: scalar_type_name.to_string(), - aggregate_function: function.to_string(), - }) - } - - fn find_comparison_operator_definition( - &self, - scalar_type_name: &str, - operator: &str, - ) -> Result<&v3::ComparisonOperatorDefinition, ConversionError> { - let scalar_type = self.find_scalar_type(scalar_type_name)?; - scalar_type - .comparison_operators - .get(operator) - .ok_or_else(|| ConversionError::UnknownComparisonOperator(operator.to_owned())) - } -} - -fn find_object_field<'a>( - object_type: &'a WithNameRef, - field_name: &str, -) -> Result<&'a schema::ObjectField, ConversionError> { - object_type.value.fields.get(field_name).ok_or_else(|| { - ConversionError::UnknownObjectTypeField { - object_type: object_type.name.to_string(), - field_name: field_name.to_string(), - path: Default::default(), // TODO: set a path for more helpful error reporting - } - }) -} - -pub fn v3_to_v2_query_request( - context: &QueryContext, - request: v3::QueryRequest, -) -> Result { - let collection_object_type = context.find_collection_object_type(&request.collection)?; - - Ok(v2::QueryRequest { - relationships: v3_to_v2_relationships(&request)?, - target: Target::TTable { - name: vec![request.collection], - arguments: v3_to_v2_arguments(request.arguments.clone()), - }, - query: Box::new(v3_to_v2_query( - context, - &request.collection_relationships, - &collection_object_type, - request.query, - &collection_object_type, - )?), - - // We are using v2 types that have been augmented with a `variables` field (even though - // that is not part of the v2 API). 
For queries translated from v3 we use `variables` - // instead of `foreach`. - foreach: None, - variables: request.variables, - }) -} - -fn v3_to_v2_query( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - query: v3::Query, - collection_object_type: &WithNameRef, -) -> Result { - let aggregates: Option> = query - .aggregates - .map(|aggregates| -> Result<_, ConversionError> { - aggregates - .into_iter() - .map(|(name, aggregate)| { - Ok(( - name, - v3_to_v2_aggregate(context, collection_object_type, aggregate)?, - )) - }) - .collect() - }) - .transpose()?; - - let fields = v3_to_v2_fields( - context, - collection_relationships, - root_collection_object_type, - collection_object_type, - query.fields, - )?; - - let order_by: Option = query - .order_by - .map(|order_by| -> Result<_, ConversionError> { - let (elements, relations) = order_by - .elements - .into_iter() - .map(|order_by_element| { - v3_to_v2_order_by_element( - context, - collection_relationships, - root_collection_object_type, - collection_object_type, - order_by_element, - ) - }) - .collect::, ConversionError>>()? 
- .into_iter() - .try_fold( - ( - Vec::::new(), - HashMap::::new(), - ), - |(mut acc_elems, mut acc_rels), (elem, rels)| { - acc_elems.push(elem); - merge_order_by_relations(&mut acc_rels, rels)?; - Ok((acc_elems, acc_rels)) - }, - )?; - Ok(v2::OrderBy { - elements, - relations, - }) - }) - .transpose()?; - - let limit = optional_32bit_number_to_64bit(query.limit); - let offset = optional_32bit_number_to_64bit(query.offset); - - Ok(v2::Query { - aggregates, - aggregates_limit: limit, - fields, - order_by, - limit, - offset, - r#where: query - .predicate - .map(|expr| { - v3_to_v2_expression( - context, - collection_relationships, - root_collection_object_type, - collection_object_type, - expr, - ) - }) - .transpose()?, - }) -} - -fn merge_order_by_relations( - rels1: &mut HashMap, - rels2: HashMap, -) -> Result<(), ConversionError> { - for (relationship_name, relation2) in rels2 { - if let Some(relation1) = rels1.get_mut(&relationship_name) { - if relation1.r#where != relation2.r#where { - // v2 does not support navigating the same relationship more than once across multiple - // order by elements and having different predicates used on the same relationship in - // different order by elements. This appears to be technically supported by NDC. 
- return Err(ConversionError::NotImplemented("Relationships used in order by elements cannot contain different predicates when used more than once")); - } - merge_order_by_relations(&mut relation1.subrelations, relation2.subrelations)?; - } else { - rels1.insert(relationship_name, relation2); - } - } - Ok(()) -} - -fn v3_to_v2_aggregate( - context: &QueryContext, - collection_object_type: &WithNameRef, - aggregate: v3::Aggregate, -) -> Result { - match aggregate { - v3::Aggregate::ColumnCount { column, distinct } => { - Ok(v2::Aggregate::ColumnCount { column, distinct }) - } - v3::Aggregate::SingleColumn { column, function } => { - let object_type_field = find_object_field(collection_object_type, column.as_ref())?; - let column_scalar_type_name = get_scalar_type_name(&object_type_field.r#type)?; - let aggregate_function = context - .find_aggregation_function_definition(&column_scalar_type_name, &function)?; - let result_type = type_to_type_name(&aggregate_function.result_type)?; - Ok(v2::Aggregate::SingleColumn { - column, - function, - result_type, - }) - } - v3::Aggregate::StarCount {} => Ok(v2::Aggregate::StarCount {}), - } -} - -fn type_to_type_name(t: &v3::Type) -> Result { - match t { - v3::Type::Named { name } => Ok(name.clone()), - v3::Type::Nullable { underlying_type } => type_to_type_name(underlying_type), - v3::Type::Array { .. } => Err(ConversionError::TypeMismatch(format!( - "Expected a named type, but got an array type: {t:?}" - ))), - v3::Type::Predicate { .. 
} => Err(ConversionError::TypeMismatch(format!( - "Expected a named type, but got a predicate type: {t:?}" - ))), - } -} - -fn v3_to_v2_fields( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - object_type: &WithNameRef, - v3_fields: Option>, -) -> Result>, ConversionError> { - let v2_fields: Option> = v3_fields - .map(|fields| { - fields - .into_iter() - .map(|(name, field)| { - Ok(( - name, - v3_to_v2_field( - context, - collection_relationships, - root_collection_object_type, - object_type, - field, - )?, - )) - }) - .collect::>() - }) - .transpose()?; - Ok(v2_fields) -} - -fn v3_to_v2_field( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - object_type: &WithNameRef, - field: v3::Field, -) -> Result { - match field { - v3::Field::Column { column, fields } => { - let object_type_field = find_object_field(object_type, column.as_ref())?; - v3_to_v2_nested_field( - context, - collection_relationships, - root_collection_object_type, - column, - &object_type_field.r#type, - fields, - ) - } - v3::Field::Relationship { - query, - relationship, - arguments: _, - } => { - let v3_relationship = lookup_relationship(collection_relationships, &relationship)?; - let collection_object_type = - context.find_collection_object_type(&v3_relationship.target_collection)?; - Ok(v2::Field::Relationship { - query: Box::new(v3_to_v2_query( - context, - collection_relationships, - root_collection_object_type, - *query, - &collection_object_type, - )?), - relationship, - }) - } - } -} - -fn v3_to_v2_nested_field( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - column: String, - schema_type: &schema::Type, - nested_field: Option, -) -> Result { - match schema_type { - schema::Type::ExtendedJSON => { - Ok(v2::Field::Column { - column, - column_type: mongodb_support::EXTENDED_JSON_TYPE_NAME.to_string(), - 
}) - } - schema::Type::Scalar(bson_scalar_type) => { - Ok(v2::Field::Column { - column, - column_type: bson_scalar_type.graphql_name(), - }) - }, - schema::Type::Nullable(underlying_type) => v3_to_v2_nested_field(context, collection_relationships, root_collection_object_type, column, underlying_type, nested_field), - schema::Type::ArrayOf(element_type) => { - let inner_nested_field = match nested_field { - None => Ok(None), - Some(v3::NestedField::Object(_nested_object)) => Err(ConversionError::TypeMismatch("Expected an array nested field selection, but got an object nested field selection instead".into())), - Some(v3::NestedField::Array(nested_array)) => Ok(Some(*nested_array.fields)), - }?; - let nested_v2_field = v3_to_v2_nested_field(context, collection_relationships, root_collection_object_type, column, element_type, inner_nested_field)?; - Ok(v2::Field::NestedArray { - field: Box::new(nested_v2_field), - limit: None, - offset: None, - r#where: None, - }) - }, - schema::Type::Object(object_type_name) => { - match nested_field { - None => { - Ok(v2::Field::Column { - column, - column_type: object_type_name.clone(), - }) - }, - Some(v3::NestedField::Object(nested_object)) => { - let object_type = context.find_object_type(object_type_name.as_ref())?; - let mut query = v2::Query::new(); - query.fields = v3_to_v2_fields(context, collection_relationships, root_collection_object_type, &object_type, Some(nested_object.fields))?; - Ok(v2::Field::NestedObject { - column, - query: Box::new(query), - }) - }, - Some(v3::NestedField::Array(_nested_array)) => - Err(ConversionError::TypeMismatch("Expected an array nested field selection, but got an object nested field selection instead".into())), - } - }, - } -} - -fn v3_to_v2_order_by_element( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - object_type: &WithNameRef, - elem: v3::OrderByElement, -) -> Result<(v2::OrderByElement, HashMap), ConversionError> { - let 
(target, target_path) = match elem.target { - v3::OrderByTarget::Column { name, path } => ( - v2::OrderByTarget::Column { - column: v2::ColumnSelector::Column(name), - }, - path, - ), - v3::OrderByTarget::SingleColumnAggregate { - column, - function, - path, - } => { - let end_of_relationship_path_object_type = path - .last() - .map(|last_path_element| { - let relationship = lookup_relationship( - collection_relationships, - &last_path_element.relationship, - )?; - context.find_collection_object_type(&relationship.target_collection) - }) - .transpose()?; - let target_object_type = end_of_relationship_path_object_type - .as_ref() - .unwrap_or(object_type); - let object_field = find_object_field(target_object_type, &column)?; - let scalar_type_name = get_scalar_type_name(&object_field.r#type)?; - let aggregate_function = - context.find_aggregation_function_definition(&scalar_type_name, &function)?; - let result_type = type_to_type_name(&aggregate_function.result_type)?; - let target = v2::OrderByTarget::SingleColumnAggregate { - column, - function, - result_type, - }; - (target, path) - } - v3::OrderByTarget::StarCountAggregate { path } => { - (v2::OrderByTarget::StarCountAggregate {}, path) - } - }; - let (target_path, relations) = v3_to_v2_target_path( - context, - collection_relationships, - root_collection_object_type, - target_path, - )?; - let order_by_element = v2::OrderByElement { - order_direction: match elem.order_direction { - v3::OrderDirection::Asc => v2::OrderDirection::Asc, - v3::OrderDirection::Desc => v2::OrderDirection::Desc, - }, - target, - target_path, - }; - Ok((order_by_element, relations)) -} - -fn v3_to_v2_target_path( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - path: Vec, -) -> Result<(Vec, HashMap), ConversionError> { - let mut v2_path = vec![]; - let v2_relations = v3_to_v2_target_path_step::>( - context, - collection_relationships, - root_collection_object_type, - 
path.into_iter(), - &mut v2_path, - )?; - Ok((v2_path, v2_relations)) -} - -fn v3_to_v2_target_path_step>( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - mut path_iter: T::IntoIter, - v2_path: &mut Vec, -) -> Result, ConversionError> { - let mut v2_relations = HashMap::new(); - - if let Some(path_element) = path_iter.next() { - v2_path.push(path_element.relationship.clone()); - - let where_expr = path_element - .predicate - .map(|expression| { - let v3_relationship = - lookup_relationship(collection_relationships, &path_element.relationship)?; - let target_object_type = - context.find_collection_object_type(&v3_relationship.target_collection)?; - let v2_expression = v3_to_v2_expression( - context, - collection_relationships, - root_collection_object_type, - &target_object_type, - *expression, - )?; - Ok(Box::new(v2_expression)) - }) - .transpose()?; - - let subrelations = v3_to_v2_target_path_step::( - context, - collection_relationships, - root_collection_object_type, - path_iter, - v2_path, - )?; - - v2_relations.insert( - path_element.relationship, - v2::OrderByRelation { - r#where: where_expr, - subrelations, - }, - ); - } - - Ok(v2_relations) -} - -/// Like v2, a v3 QueryRequest has a map of Relationships. Unlike v2, v3 does not indicate the -/// source collection for each relationship. Instead we are supposed to keep track of the "current" -/// collection so that when we hit a Field that refers to a Relationship we infer that the source -/// is the "current" collection. This means that to produce a v2 Relationship mapping we need to -/// traverse the query here. -fn v3_to_v2_relationships( - query_request: &v3::QueryRequest, -) -> Result, ConversionError> { - // This only captures relationships that are referenced by a Field or an OrderBy in the query. - // We might record a relationship more than once, but we are recording to maps so that doesn't - // matter. 
We might capture the same relationship multiple times with different source - // collections, but that is by design. - let relationships_by_source_and_name: Vec<(Vec, (String, v2::Relationship))> = - query_traversal(query_request) - .filter_map_ok(|TraversalStep { collection, node }| match node { - Node::Field { - field: - v3::Field::Relationship { - relationship, - arguments, - .. - }, - .. - } => Some((collection, relationship, arguments)), - Node::ExistsInCollection(v3::ExistsInCollection::Related { - relationship, - arguments, - }) => Some((collection, relationship, arguments)), - Node::PathElement(v3::PathElement { - relationship, - arguments, - .. - }) => Some((collection, relationship, arguments)), - _ => None, - }) - .map_ok(|(collection_name, relationship_name, arguments)| { - let v3_relationship = lookup_relationship( - &query_request.collection_relationships, - relationship_name, - )?; - - // TODO: Functions (native queries) may be referenced multiple times in a query - // request with different arguments. To accommodate that we will need to record - // separate v2 relations for each reference with different names. In the current - // implementation one set of arguments will override arguments to all occurrences of - // a given function. 
MDB-106 - let v2_relationship = v2::Relationship { - column_mapping: v2::ColumnMapping( - v3_relationship - .column_mapping - .iter() - .map(|(source_col, target_col)| { - ( - ColumnSelector::Column(source_col.clone()), - ColumnSelector::Column(target_col.clone()), - ) - }) - .collect(), - ), - relationship_type: match v3_relationship.relationship_type { - v3::RelationshipType::Object => v2::RelationshipType::Object, - v3::RelationshipType::Array => v2::RelationshipType::Array, - }, - target: v2::Target::TTable { - name: vec![v3_relationship.target_collection.clone()], - arguments: v3_to_v2_relationship_arguments(arguments.clone()), - }, - }; - - Ok(( - vec![collection_name.to_owned()], // put in vec to match v2 namespaced format - (relationship_name.clone(), v2_relationship), - )) as Result<_, ConversionError> - }) - // The previous step produced Result,_> values. Flatten them to Result<_,_>. - // We can't use the flatten() Iterator method because that loses the outer Result errors. - .map(|result| match result { - Ok(Ok(v)) => Ok(v), - Ok(Err(e)) => Err(e), - Err(e) => Err(e), - }) - .collect::>()?; - - let grouped_by_source: HashMap, Vec<(String, v2::Relationship)>> = - relationships_by_source_and_name - .into_iter() - .into_group_map(); - - let v2_relationships = grouped_by_source - .into_iter() - .map(|(source_table, relationships)| v2::TableRelationships { - source_table, - relationships: relationships.into_iter().collect(), - }) - .collect(); - - Ok(v2_relationships) -} - -fn v3_to_v2_expression( - context: &QueryContext, - collection_relationships: &BTreeMap, - root_collection_object_type: &WithNameRef, - object_type: &WithNameRef, - expression: v3::Expression, -) -> Result { - match expression { - v3::Expression::And { expressions } => Ok(v2::Expression::And { - expressions: expressions - .into_iter() - .map(|expr| { - v3_to_v2_expression( - context, - collection_relationships, - root_collection_object_type, - object_type, - expr, - ) - }) - 
.collect::>()?, - }), - v3::Expression::Or { expressions } => Ok(v2::Expression::Or { - expressions: expressions - .into_iter() - .map(|expr| { - v3_to_v2_expression( - context, - collection_relationships, - root_collection_object_type, - object_type, - expr, - ) - }) - .collect::>()?, - }), - v3::Expression::Not { expression } => Ok(v2::Expression::Not { - expression: Box::new(v3_to_v2_expression( - context, - collection_relationships, - root_collection_object_type, - object_type, - *expression, - )?), - }), - v3::Expression::UnaryComparisonOperator { column, operator } => { - Ok(v2::Expression::ApplyUnaryComparison { - column: v3_to_v2_comparison_target( - root_collection_object_type, - object_type, - column, - )?, - operator: match operator { - v3::UnaryComparisonOperator::IsNull => v2::UnaryComparisonOperator::IsNull, - }, - }) - } - v3::Expression::BinaryComparisonOperator { - column, - operator, - value, - } => v3_to_v2_binary_comparison( - context, - root_collection_object_type, - object_type, - column, - operator, - value, - ), - v3::Expression::Exists { - in_collection, - predicate, - } => { - let (in_table, collection_object_type) = match in_collection { - v3::ExistsInCollection::Related { - relationship, - arguments: _, - } => { - let v3_relationship = - lookup_relationship(collection_relationships, &relationship)?; - let collection_object_type = - context.find_collection_object_type(&v3_relationship.target_collection)?; - let in_table = v2::ExistsInTable::RelatedTable { relationship }; - Ok((in_table, collection_object_type)) - } - v3::ExistsInCollection::Unrelated { - collection, - arguments: _, - } => { - let collection_object_type = - context.find_collection_object_type(&collection)?; - let in_table = v2::ExistsInTable::UnrelatedTable { - table: vec![collection], - }; - Ok((in_table, collection_object_type)) - } - }?; - Ok(v2::Expression::Exists { - in_table, - r#where: Box::new(if let Some(predicate) = predicate { - v3_to_v2_expression( - context, - 
collection_relationships, - root_collection_object_type, - &collection_object_type, - *predicate, - )? - } else { - // empty expression - v2::Expression::Or { - expressions: vec![], - } - }), - }) - } - } -} - -// TODO: NDC-393 - What do we need to do to handle array comparisons like `in`?. v3 now combines -// scalar and array comparisons, v2 separates them -fn v3_to_v2_binary_comparison( - context: &QueryContext, - root_collection_object_type: &WithNameRef, - object_type: &WithNameRef, - column: v3::ComparisonTarget, - operator: String, - value: v3::ComparisonValue, -) -> Result { - let comparison_column = - v3_to_v2_comparison_target(root_collection_object_type, object_type, column)?; - let operator_definition = - context.find_comparison_operator_definition(&comparison_column.column_type, &operator)?; - let operator = match operator_definition { - v3::ComparisonOperatorDefinition::Equal => v2::BinaryComparisonOperator::Equal, - _ => v2::BinaryComparisonOperator::CustomBinaryComparisonOperator(operator), - }; - Ok(v2::Expression::ApplyBinaryComparison { - value: v3_to_v2_comparison_value( - root_collection_object_type, - object_type, - comparison_column.column_type.clone(), - value, - )?, - column: comparison_column, - operator, - }) -} - -fn get_scalar_type_name(schema_type: &schema::Type) -> Result { - match schema_type { - schema::Type::ExtendedJSON => Ok(mongodb_support::EXTENDED_JSON_TYPE_NAME.to_string()), - schema::Type::Scalar(scalar_type_name) => Ok(scalar_type_name.graphql_name()), - schema::Type::Object(object_name_name) => Err(ConversionError::TypeMismatch(format!( - "Expected a scalar type, got the object type {object_name_name}" - ))), - schema::Type::ArrayOf(element_type) => Err(ConversionError::TypeMismatch(format!( - "Expected a scalar type, got an array of {element_type:?}" - ))), - schema::Type::Nullable(underlying_type) => get_scalar_type_name(underlying_type), - } -} - -fn v3_to_v2_comparison_target( - root_collection_object_type: 
&WithNameRef, - object_type: &WithNameRef, - target: v3::ComparisonTarget, -) -> Result { - match target { - v3::ComparisonTarget::Column { name, path } => { - let object_field = find_object_field(object_type, &name)?; - let scalar_type_name = get_scalar_type_name(&object_field.r#type)?; - if !path.is_empty() { - // This is not supported in the v2 model. ComparisonColumn.path accepts only two values: - // []/None for the current table, and ["*"] for the RootCollectionColumn (handled below) - Err(ConversionError::NotImplemented( - "The MongoDB connector does not currently support comparisons against columns from related tables", - )) - } else { - Ok(v2::ComparisonColumn { - column_type: scalar_type_name, - name: ColumnSelector::Column(name), - path: None, - }) - } - } - v3::ComparisonTarget::RootCollectionColumn { name } => { - let object_field = find_object_field(root_collection_object_type, &name)?; - let scalar_type_name = get_scalar_type_name(&object_field.r#type)?; - Ok(v2::ComparisonColumn { - column_type: scalar_type_name, - name: ColumnSelector::Column(name), - path: Some(vec!["$".to_owned()]), - }) - } - } -} - -fn v3_to_v2_comparison_value( - root_collection_object_type: &WithNameRef, - object_type: &WithNameRef, - comparison_column_scalar_type: String, - value: v3::ComparisonValue, -) -> Result { - match value { - v3::ComparisonValue::Column { column } => { - Ok(v2::ComparisonValue::AnotherColumnComparison { - column: v3_to_v2_comparison_target( - root_collection_object_type, - object_type, - column, - )?, - }) - } - v3::ComparisonValue::Scalar { value } => Ok(v2::ComparisonValue::ScalarValueComparison { - value, - value_type: comparison_column_scalar_type, - }), - v3::ComparisonValue::Variable { name } => Ok(v2::ComparisonValue::Variable { name }), - } -} - -#[inline] -fn optional_32bit_number_to_64bit(n: Option) -> Option -where - B: From, -{ - n.map(|input| input.into()) -} - -fn v3_to_v2_arguments(arguments: BTreeMap) -> HashMap { - arguments - 
.into_iter() - .map(|(argument_name, argument)| match argument { - v3::Argument::Variable { name } => (argument_name, v2::Argument::Variable { name }), - v3::Argument::Literal { value } => (argument_name, v2::Argument::Literal { value }), - }) - .collect() -} - -fn v3_to_v2_relationship_arguments( - arguments: BTreeMap, -) -> HashMap { - arguments - .into_iter() - .map(|(argument_name, argument)| match argument { - v3::RelationshipArgument::Variable { name } => { - (argument_name, v2::Argument::Variable { name }) - } - v3::RelationshipArgument::Literal { value } => { - (argument_name, v2::Argument::Literal { value }) - } - v3::RelationshipArgument::Column { name } => { - (argument_name, v2::Argument::Column { name }) - } - }) - .collect() -} - -#[cfg(test)] -mod tests { - use std::collections::HashMap; - - use dc_api_test_helpers::{self as v2, source, table_relationships, target}; - use ndc_sdk::models::{OrderByElement, OrderByTarget, OrderDirection}; - use ndc_test_helpers::*; - use pretty_assertions::assert_eq; - use serde_json::json; - - use crate::test_helpers::{make_flat_schema, make_nested_schema}; - - use super::{v3_to_v2_query_request, v3_to_v2_relationships}; - - #[test] - fn translates_query_request_relationships() -> Result<(), anyhow::Error> { - let v3_query_request = query_request() - .collection("schools") - .relationships([ - ( - "school_classes", - relationship("classes", [("_id", "school_id")]), - ), - ( - "class_students", - relationship("students", [("_id", "class_id")]), - ), - ( - "class_department", - relationship("departments", [("department_id", "_id")]).object_type(), - ), - ( - "school_directory", - relationship("directory", [("_id", "school_id")]).object_type(), - ), - ( - "student_advisor", - relationship("advisors", [("advisor_id", "_id")]).object_type(), - ), - ( - "existence_check", - relationship("some_collection", [("some_id", "_id")]), - ), - ]) - .query( - query() - .fields([relation_field!("school_classes" => "class_name", 
query() - .fields([ - relation_field!("class_students" => "student_name") - ]) - )]) - .order_by(vec![OrderByElement { - order_direction: OrderDirection::Asc, - target: OrderByTarget::Column { - name: "advisor_name".to_owned(), - path: vec![ - path_element("school_classes") - .predicate(equal( - target!( - "department_id", - [ - path_element("school_classes"), - path_element("class_department"), - ], - ), - column_value!( - "math_department_id", - [path_element("school_directory")], - ), - )) - .into(), - path_element("class_students").into(), - path_element("student_advisor").into(), - ], - }, - }]) - // The `And` layer checks that we properly recursive into Expressions - .predicate(and([exists( - related!("existence_check"), - empty_expression(), - )])), - ) - .into(); - - let expected_relationships = vec![ - table_relationships( - source("classes"), - [ - ( - "class_department", - v2::relationship( - target("departments"), - [(v2::select!("department_id"), v2::select!("_id"))], - ) - .object_type(), - ), - ( - "class_students", - v2::relationship( - target("students"), - [(v2::select!("_id"), v2::select!("class_id"))], - ), - ), - ], - ), - table_relationships( - source("schools"), - [ - ( - "school_classes", - v2::relationship( - target("classes"), - [(v2::select!("_id"), v2::select!("school_id"))], - ), - ), - ( - "school_directory", - v2::relationship( - target("directory"), - [(v2::select!("_id"), v2::select!("school_id"))], - ) - .object_type(), - ), - ( - "existence_check", - v2::relationship( - target("some_collection"), - [(v2::select!("some_id"), v2::select!("_id"))], - ), - ), - ], - ), - table_relationships( - source("students"), - [( - "student_advisor", - v2::relationship( - target("advisors"), - [(v2::select!("advisor_id"), v2::select!("_id"))], - ) - .object_type(), - )], - ), - ]; - - let mut relationships = v3_to_v2_relationships(&v3_query_request)?; - - // Sort to match order of expected result - relationships.sort_by_key(|rels| 
rels.source_table.clone()); - - assert_eq!(relationships, expected_relationships); - Ok(()) - } - - #[test] - fn translates_root_column_references() -> Result<(), anyhow::Error> { - let query_context = make_flat_schema(); - let query = query_request() - .collection("authors") - .query(query().fields([field!("last_name")]).predicate(exists( - unrelated!("articles"), - and([ - equal(target!("author_id"), column_value!(root("id"))), - binop("_regex", target!("title"), value!("Functional.*")), - ]), - ))) - .into(); - let v2_request = v3_to_v2_query_request(&query_context, query)?; - - let expected = v2::query_request() - .target(["authors"]) - .query( - v2::query() - .fields([v2::column!("last_name": "String")]) - .predicate(v2::exists_unrelated( - ["articles"], - v2::and([ - v2::equal( - v2::compare!("author_id": "Int"), - v2::column_value!(["$"], "id": "Int"), - ), - v2::binop( - "_regex", - v2::compare!("title": "String"), - v2::value!(json!("Functional.*"), "String"), - ), - ]), - )), - ) - .into(); - - assert_eq!(v2_request, expected); - Ok(()) - } - - #[test] - fn translates_aggregate_selections() -> Result<(), anyhow::Error> { - let query_context = make_flat_schema(); - let query = query_request() - .collection("authors") - .query(query().aggregates([ - star_count_aggregate!("count_star"), - column_count_aggregate!("count_id" => "last_name", distinct: true), - column_aggregate!("avg_id" => "id", "avg"), - ])) - .into(); - let v2_request = v3_to_v2_query_request(&query_context, query)?; - - let expected = v2::query_request() - .target(["authors"]) - .query(v2::query().aggregates([ - v2::star_count_aggregate!("count_star"), - v2::column_count_aggregate!("count_id" => "last_name", distinct: true), - v2::column_aggregate!("avg_id" => "id", "avg": "Float"), - ])) - .into(); - - assert_eq!(v2_request, expected); - Ok(()) - } - - #[test] - fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), anyhow::Error> { - let query_context = 
make_flat_schema(); - let query = query_request() - .collection("authors") - .query( - query() - .fields([ - field!("last_name"), - relation_field!( - "author_articles" => "articles", - query().fields([field!("title"), field!("year")]) - ), - ]) - .predicate(exists( - related!("author_articles"), - binop("_regex", target!("title"), value!("Functional.*")), - )) - .order_by(vec![ - OrderByElement { - order_direction: OrderDirection::Asc, - target: OrderByTarget::SingleColumnAggregate { - column: "year".into(), - function: "avg".into(), - path: vec![path_element("author_articles").into()], - }, - }, - OrderByElement { - order_direction: OrderDirection::Desc, - target: OrderByTarget::Column { - name: "id".into(), - path: vec![], - }, - }, - ]), - ) - .relationships([( - "author_articles", - relationship("articles", [("id", "author_id")]), - )]) - .into(); - let v2_request = v3_to_v2_query_request(&query_context, query)?; - - let expected = v2::query_request() - .target(["authors"]) - .query( - v2::query() - .fields([ - v2::column!("last_name": "String"), - v2::relation_field!( - "author_articles" => "articles", - v2::query() - .fields([ - v2::column!("title": "String"), - v2::column!("year": "Int")] - ) - ), - ]) - .predicate(v2::exists( - "author_articles", - v2::binop( - "_regex", - v2::compare!("title": "String"), - v2::value!(json!("Functional.*"), "String"), - ), - )) - .order_by(dc_api_types::OrderBy { - elements: vec![ - dc_api_types::OrderByElement { - order_direction: dc_api_types::OrderDirection::Asc, - target: dc_api_types::OrderByTarget::SingleColumnAggregate { - column: "year".into(), - function: "avg".into(), - result_type: "Float".into(), - }, - target_path: vec!["author_articles".into()], - }, - dc_api_types::OrderByElement { - order_direction: dc_api_types::OrderDirection::Desc, - target: dc_api_types::OrderByTarget::Column { - column: v2::select!("id"), - }, - target_path: vec![], - }, - ], - relations: HashMap::from([( - "author_articles".into(), - 
dc_api_types::OrderByRelation { - r#where: None, - subrelations: HashMap::new(), - }, - )]), - }), - ) - .relationships(vec![table_relationships( - source("authors"), - [( - "author_articles", - v2::relationship( - target("articles"), - [(v2::select!("id"), v2::select!("author_id"))], - ), - )], - )]) - .into(); - - assert_eq!(v2_request, expected); - Ok(()) - } - - #[test] - fn translates_nested_fields() -> Result<(), anyhow::Error> { - let query_context = make_nested_schema(); - let query_request = query_request() - .collection("authors") - .query(query().fields([ - field!("author_address" => "address", object!([field!("address_country" => "country")])), - field!("author_articles" => "articles", array!(object!([field!("article_title" => "title")]))), - field!("author_array_of_arrays" => "array_of_arrays", array!(array!(object!([field!("article_title" => "title")])))) - ])) - .into(); - let v2_request = v3_to_v2_query_request(&query_context, query_request)?; - - let expected = v2::query_request() - .target(["authors"]) - .query(v2::query().fields([ - v2::nested_object!("author_address" => "address", v2::query().fields([v2::column!("address_country" => "country": "String")])), - v2::nested_array!("author_articles", v2::nested_object_field!("articles", v2::query().fields([v2::column!("article_title" => "title": "String")]))), - v2::nested_array!("author_array_of_arrays", v2::nested_array_field!(v2::nested_object_field!("array_of_arrays", v2::query().fields([v2::column!("article_title" => "title": "String")])))) - ])) - .into(); - - assert_eq!(v2_request, expected); - Ok(()) - } -} diff --git a/crates/mongodb-connector/src/api_type_conversions/query_response.rs b/crates/mongodb-connector/src/api_type_conversions/query_response.rs deleted file mode 100644 index 1985f8c9..00000000 --- a/crates/mongodb-connector/src/api_type_conversions/query_response.rs +++ /dev/null @@ -1,13 +0,0 @@ -use std::collections::BTreeMap; - -use dc_api_types::{self as v2}; -use 
ndc_sdk::models::{self as v3}; - -pub fn v2_to_v3_explain_response(response: v2::ExplainResponse) -> v3::ExplainResponse { - v3::ExplainResponse { - details: BTreeMap::from_iter([ - ("plan".to_owned(), response.lines.join("\n")), - ("query".to_owned(), response.query), - ]), - } -} diff --git a/crates/mongodb-connector/src/api_type_conversions/query_traversal.rs b/crates/mongodb-connector/src/api_type_conversions/query_traversal.rs deleted file mode 100644 index c760d639..00000000 --- a/crates/mongodb-connector/src/api_type_conversions/query_traversal.rs +++ /dev/null @@ -1,280 +0,0 @@ -use std::collections::BTreeMap; - -use itertools::Either; -use ndc_sdk::models::{ - ComparisonTarget, ComparisonValue, ExistsInCollection, Expression, Field, OrderByElement, - OrderByTarget, PathElement, Query, QueryRequest, Relationship, -}; - -use super::{helpers::lookup_relationship, ConversionError}; - -#[derive(Copy, Clone, Debug)] -pub enum Node<'a> { - ComparisonTarget(&'a ComparisonTarget), - ComparisonValue(&'a ComparisonValue), - ExistsInCollection(&'a ExistsInCollection), - Expression(&'a Expression), - Field { name: &'a str, field: &'a Field }, - OrderByElement(&'a OrderByElement), - PathElement(&'a PathElement), -} - -#[derive(Clone, Debug)] -pub struct TraversalStep<'a, 'b> { - pub collection: &'a str, - pub node: Node<'b>, -} - -#[derive(Copy, Clone, Debug)] -struct Context<'a> { - collection: &'a str, - relationships: &'a BTreeMap, -} - -impl<'a> Context<'a> { - fn set_collection<'b>(self, new_collection: &'b str) -> Context<'b> - where - 'a: 'b, - { - Context { - collection: new_collection, - relationships: self.relationships, - } - } -} - -/// Walk a v3 query producing an iterator that visits selected AST nodes. 
This is used to build up -/// maps of relationships, so the goal is to hit every instance of these node types: -/// -/// - Field (referenced by Query, MutationOperation) -/// - ExistsInCollection (referenced by Expression which is referenced by Query, PathElement) -/// - PathElement (referenced by OrderByTarget<-OrderByElement<-OrderBy<-Query, ComparisonTarget<-Expression, ComparisonValue<-Expression) -/// -/// This implementation does not guarantee an order. -pub fn query_traversal( - query_request: &QueryRequest, -) -> impl Iterator> { - let QueryRequest { - collection, - collection_relationships, - query, - .. - } = query_request; - query_traversal_helper( - Context { - relationships: collection_relationships, - collection, - }, - query, - ) -} - -fn query_traversal_helper<'a>( - context: Context<'a>, - query: &'a Query, -) -> impl Iterator, ConversionError>> { - query_fields_traversal(context, query) - .chain(traverse_collection( - expression_traversal, - context, - &query.predicate, - )) - .chain(order_by_traversal(context, query)) -} - -/// Recursively walk each Field in a Query -fn query_fields_traversal<'a>( - context: Context<'a>, - query: &'a Query, -) -> impl Iterator, ConversionError>> { - query - .fields - .iter() - .flatten() - .flat_map(move |(name, field)| { - let field_step = std::iter::once(Ok(TraversalStep { - collection: context.collection, - node: Node::Field { name, field }, - })); - field_step.chain(field_relationship_traversal(context, field)) - }) -} - -/// If the given field is a Relationship, traverses the nested query -fn field_relationship_traversal<'a>( - context: Context<'a>, - field: &'a Field, -) -> Box, ConversionError>> + 'a> { - match field { - Field::Column { .. } => Box::new(std::iter::empty()), - Field::Relationship { - query, - relationship, - .. 
- } => match lookup_relationship(context.relationships, relationship) { - Ok(rel) => Box::new(query_traversal_helper( - context.set_collection(&rel.target_collection), - query, - )), - Err(e) => Box::new(std::iter::once(Err(e))), - }, - } -} - -/// Traverse OrderByElements, including their PathElements. -fn order_by_traversal<'a>( - context: Context<'a>, - query: &'a Query, -) -> impl Iterator, ConversionError>> { - let order_by_elements = query.order_by.as_ref().map(|o| &o.elements); - - order_by_elements - .into_iter() - .flatten() - .flat_map(move |order_by_element| { - let order_by_element_step = std::iter::once(Ok(TraversalStep { - collection: context.collection, - node: Node::OrderByElement(order_by_element), - })); - let path = match &order_by_element.target { - OrderByTarget::Column { path, .. } => path, - OrderByTarget::SingleColumnAggregate { path, .. } => path, - OrderByTarget::StarCountAggregate { path } => path, - }; - order_by_element_step.chain(path_elements_traversal(context, path)) - }) -} - -fn path_elements_traversal<'a>( - context: Context<'a>, - path: &'a [PathElement], -) -> impl Iterator, ConversionError>> { - path.iter() - .scan( - context.collection, - move |element_collection, path_element| -> Option>> { - match lookup_relationship(context.relationships, &path_element.relationship) { - Ok(rel) => { - let path_element_step = std::iter::once(Ok(TraversalStep { - collection: element_collection, - node: Node::PathElement(path_element), - })); - - let expression_steps = match &path_element.predicate { - Some(expression) => Either::Right(expression_traversal( - context.set_collection(element_collection), - expression, - )), - None => Either::Left(std::iter::empty()), - }; - - *element_collection = &rel.target_collection; - - Some(Box::new(path_element_step.chain(expression_steps))) - } - Err(e) => Some(Box::new(std::iter::once(Err(e)))), - } - }, - ) - .flatten() -} - -fn expression_traversal<'a>( - context: Context<'a>, - expression: &'a 
Expression, -) -> impl Iterator, ConversionError>> { - let expression_step = std::iter::once(Ok(TraversalStep { - collection: context.collection, - node: Node::Expression(expression), - })); - - let nested_expression_steps: Box> = match expression { - Expression::And { expressions } => Box::new(traverse_collection( - expression_traversal, - context, - expressions, - )), - Expression::Or { expressions } => Box::new(traverse_collection( - expression_traversal, - context, - expressions, - )), - Expression::Not { expression } => Box::new(expression_traversal(context, expression)), - Expression::UnaryComparisonOperator { column, .. } => { - Box::new(comparison_target_traversal(context, column)) - } - Expression::BinaryComparisonOperator { column, value, .. } => Box::new( - comparison_target_traversal(context, column) - .chain(comparison_value_traversal(context, value)), - ), - Expression::Exists { - in_collection, - predicate, - } => { - let in_collection_step = std::iter::once(Ok(TraversalStep { - collection: context.collection, - node: Node::ExistsInCollection(in_collection), - })); - match predicate { - Some(predicate) => { - Box::new(in_collection_step.chain(expression_traversal(context, predicate))) - } - None => Box::new(std::iter::empty()), - } - } - }; - - expression_step.chain(nested_expression_steps) -} - -fn comparison_target_traversal<'a>( - context: Context<'a>, - comparison_target: &'a ComparisonTarget, -) -> impl Iterator, ConversionError>> { - let this_step = std::iter::once(Ok(TraversalStep { - collection: context.collection, - node: Node::ComparisonTarget(comparison_target), - })); - - let nested_steps: Box> = match comparison_target { - ComparisonTarget::Column { path, .. } => Box::new(path_elements_traversal(context, path)), - ComparisonTarget::RootCollectionColumn { .. 
} => Box::new(std::iter::empty()), - }; - - this_step.chain(nested_steps) -} - -fn comparison_value_traversal<'a>( - context: Context<'a>, - comparison_value: &'a ComparisonValue, -) -> impl Iterator, ConversionError>> { - let this_step = std::iter::once(Ok(TraversalStep { - collection: context.collection, - node: Node::ComparisonValue(comparison_value), - })); - - let nested_steps: Box> = match comparison_value { - ComparisonValue::Column { column } => { - Box::new(comparison_target_traversal(context, column)) - } - ComparisonValue::Scalar { .. } => Box::new(std::iter::empty()), - ComparisonValue::Variable { .. } => Box::new(std::iter::empty()), - }; - - this_step.chain(nested_steps) -} - -fn traverse_collection<'a, Node, Nodes, I, F>( - traverse: F, - context: Context<'a>, - ast_nodes: &'a Nodes, -) -> impl Iterator, ConversionError>> -where - &'a Nodes: IntoIterator, - F: Fn(Context<'a>, Node) -> I, - I: Iterator, ConversionError>>, -{ - ast_nodes - .into_iter() - .flat_map(move |node| traverse(context, node)) -} diff --git a/crates/mongodb-connector/src/capabilities.rs b/crates/mongodb-connector/src/capabilities.rs index cdd9f4e6..3319e74e 100644 --- a/crates/mongodb-connector/src/capabilities.rs +++ b/crates/mongodb-connector/src/capabilities.rs @@ -1,14 +1,5 @@ -use std::collections::BTreeMap; - -use mongodb_agent_common::{ - comparison_function::ComparisonFunction, - scalar_types_capabilities::{aggregate_functions, comparison_operators}, -}; -use mongodb_support::BsonScalarType; use ndc_sdk::models::{ - AggregateFunctionDefinition, Capabilities, CapabilitiesResponse, ComparisonOperatorDefinition, - LeafCapability, QueryCapabilities, RelationshipCapabilities, ScalarType, Type, - TypeRepresentation, + Capabilities, CapabilitiesResponse, LeafCapability, QueryCapabilities, RelationshipCapabilities, }; pub fn mongo_capabilities_response() -> CapabilitiesResponse { @@ -31,93 +22,3 @@ pub fn mongo_capabilities_response() -> CapabilitiesResponse { }, } } - -pub fn 
scalar_types() -> BTreeMap { - enum_iterator::all::() - .map(make_scalar_type) - .chain([extended_json_scalar_type()]) - .collect::>() -} - -fn extended_json_scalar_type() -> (String, ScalarType) { - ( - mongodb_support::EXTENDED_JSON_TYPE_NAME.to_owned(), - ScalarType { - representation: Some(TypeRepresentation::JSON), - aggregate_functions: BTreeMap::new(), - comparison_operators: BTreeMap::new(), - }, - ) -} - -fn make_scalar_type(bson_scalar_type: BsonScalarType) -> (String, ScalarType) { - let scalar_type_name = bson_scalar_type.graphql_name(); - let scalar_type = ScalarType { - representation: bson_scalar_type_representation(bson_scalar_type), - aggregate_functions: bson_aggregation_functions(bson_scalar_type), - comparison_operators: bson_comparison_operators(bson_scalar_type), - }; - (scalar_type_name, scalar_type) -} - -fn bson_scalar_type_representation(bson_scalar_type: BsonScalarType) -> Option { - match bson_scalar_type { - BsonScalarType::Double => Some(TypeRepresentation::Float64), - BsonScalarType::Decimal => Some(TypeRepresentation::BigDecimal), // Not quite.... 
Mongo Decimal is 128-bit, BigDecimal is unlimited - BsonScalarType::Int => Some(TypeRepresentation::Int32), - BsonScalarType::Long => Some(TypeRepresentation::Int64), - BsonScalarType::String => Some(TypeRepresentation::String), - BsonScalarType::Date => Some(TypeRepresentation::Timestamp), // Mongo Date is milliseconds since unix epoch - BsonScalarType::Timestamp => None, // Internal Mongo timestamp type - BsonScalarType::BinData => None, - BsonScalarType::ObjectId => Some(TypeRepresentation::String), // Mongo ObjectId is usually expressed as a 24 char hex string (12 byte number) - BsonScalarType::Bool => Some(TypeRepresentation::Boolean), - BsonScalarType::Null => None, - BsonScalarType::Regex => None, - BsonScalarType::Javascript => None, - BsonScalarType::JavascriptWithScope => None, - BsonScalarType::MinKey => None, - BsonScalarType::MaxKey => None, - BsonScalarType::Undefined => None, - BsonScalarType::DbPointer => None, - BsonScalarType::Symbol => None, - } -} - -fn bson_aggregation_functions( - bson_scalar_type: BsonScalarType, -) -> BTreeMap { - aggregate_functions(bson_scalar_type) - .map(|(fn_name, result_type)| { - let aggregation_definition = AggregateFunctionDefinition { - result_type: bson_to_named_type(result_type), - }; - (fn_name.graphql_name().to_owned(), aggregation_definition) - }) - .collect() -} - -fn bson_comparison_operators( - bson_scalar_type: BsonScalarType, -) -> BTreeMap { - comparison_operators(bson_scalar_type) - .map(|(comparison_fn, arg_type)| { - let fn_name = comparison_fn.graphql_name().to_owned(); - match comparison_fn { - ComparisonFunction::Equal => (fn_name, ComparisonOperatorDefinition::Equal), - _ => ( - fn_name, - ComparisonOperatorDefinition::Custom { - argument_type: bson_to_named_type(arg_type), - }, - ), - } - }) - .collect() -} - -fn bson_to_named_type(bson_scalar_type: BsonScalarType) -> Type { - Type::Named { - name: bson_scalar_type.graphql_name(), - } -} diff --git a/crates/mongodb-connector/src/main.rs 
b/crates/mongodb-connector/src/main.rs index 261a1185..abcab866 100644 --- a/crates/mongodb-connector/src/main.rs +++ b/crates/mongodb-connector/src/main.rs @@ -1,15 +1,9 @@ -mod api_type_conversions; mod capabilities; mod error_mapping; mod mongo_connector; mod mutation; -mod query_context; -mod query_response; mod schema; -#[cfg(test)] -mod test_helpers; - use std::error::Error; use mongo_connector::MongoConnector; diff --git a/crates/mongodb-connector/src/mongo_connector.rs b/crates/mongodb-connector/src/mongo_connector.rs index 9b40389a..4c29c2cf 100644 --- a/crates/mongodb-connector/src/mongo_connector.rs +++ b/crates/mongodb-connector/src/mongo_connector.rs @@ -4,8 +4,8 @@ use anyhow::anyhow; use async_trait::async_trait; use configuration::Configuration; use mongodb_agent_common::{ - explain::explain_query, health::check_health, query::handle_query_request, - state::ConnectorState, + explain::explain_query, health::check_health, mongo_query_plan::MongoConfiguration, + query::handle_query_request, state::ConnectorState, }; use ndc_sdk::{ connector::{ @@ -18,14 +18,9 @@ use ndc_sdk::{ QueryResponse, SchemaResponse, }, }; -use tracing::{instrument, Instrument}; +use tracing::instrument; -use crate::{ - api_type_conversions::{v2_to_v3_explain_response, v3_to_v2_query_request}, - error_mapping::{mongo_agent_error_to_explain_error, mongo_agent_error_to_query_error}, - query_context::get_query_context, - query_response::serialize_query_response, -}; +use crate::error_mapping::{mongo_agent_error_to_explain_error, mongo_agent_error_to_query_error}; use crate::{capabilities::mongo_capabilities_response, mutation::handle_mutation_request}; #[derive(Clone, Default)] @@ -40,11 +35,11 @@ impl ConnectorSetup for MongoConnector { async fn parse_configuration( &self, configuration_dir: impl AsRef + Send, - ) -> Result { + ) -> Result { let configuration = Configuration::parse_configuration(configuration_dir) .await .map_err(|err| ParseError::Other(err.into()))?; - 
Ok(configuration) + Ok(MongoConfiguration(configuration)) } /// Reads database connection URI from environment variable @@ -54,7 +49,7 @@ impl ConnectorSetup for MongoConnector { // - `skip_all` omits arguments from the trace async fn try_init_state( &self, - _configuration: &Configuration, + _configuration: &MongoConfiguration, _metrics: &mut prometheus::Registry, ) -> Result { let state = mongodb_agent_common::state::try_init_state().await?; @@ -65,7 +60,7 @@ impl ConnectorSetup for MongoConnector { #[allow(clippy::blocks_in_conditions)] #[async_trait] impl Connector for MongoConnector { - type Configuration = Configuration; + type Configuration = MongoConfiguration; type State = ConnectorState; #[instrument(err, skip_all)] @@ -108,11 +103,10 @@ impl Connector for MongoConnector { state: &Self::State, request: QueryRequest, ) -> Result, ExplainError> { - let v2_request = v3_to_v2_query_request(&get_query_context(configuration), request)?; - let response = explain_query(configuration, state, v2_request) + let response = explain_query(configuration, state, request) .await .map_err(mongo_agent_error_to_explain_error)?; - Ok(v2_to_v3_explain_response(response).into()) + Ok(response.into()) } #[instrument(err, skip_all)] @@ -132,37 +126,18 @@ impl Connector for MongoConnector { state: &Self::State, request: MutationRequest, ) -> Result, MutationError> { - let query_context = get_query_context(configuration); - handle_mutation_request(configuration, query_context, state, request).await + handle_mutation_request(configuration, state, request).await } - #[instrument(err, skip_all)] + #[instrument(name = "/query", err, skip_all, fields(internal.visibility = "user"))] async fn query( configuration: &Self::Configuration, state: &Self::State, request: QueryRequest, ) -> Result, QueryError> { - let response = async move { - tracing::debug!(query_request = %serde_json::to_string(&request).unwrap(), "received query request"); - let query_context = 
get_query_context(configuration); - let v2_request = tracing::info_span!("Prepare Query Request").in_scope(|| { - v3_to_v2_query_request(&query_context, request.clone()) - })?; - let response_documents = handle_query_request(configuration, state, v2_request) - .instrument(tracing::info_span!("Process Query Request", internal.visibility = "user")) - .await - .map_err(mongo_agent_error_to_query_error)?; - tracing::info_span!("Serialize Query Response", internal.visibility = "user").in_scope(|| { - serialize_query_response(&query_context, &request, response_documents) - .map_err(|err| { - QueryError::UnprocessableContent(format!( - "error converting MongoDB response to JSON: {err}" - )) - }) - }) - } - .instrument(tracing::info_span!("/query", internal.visibility = "user")) - .await?; + let response = handle_query_request(configuration, state, request) + .await + .map_err(mongo_agent_error_to_query_error)?; Ok(response.into()) } } diff --git a/crates/mongodb-connector/src/mutation.rs b/crates/mongodb-connector/src/mutation.rs index e6ea2590..74a2bdbf 100644 --- a/crates/mongodb-connector/src/mutation.rs +++ b/crates/mongodb-connector/src/mutation.rs @@ -1,6 +1,3 @@ -use std::collections::BTreeMap; - -use configuration::Configuration; use futures::future::try_join_all; use itertools::Itertools; use mongodb::{ @@ -8,37 +5,35 @@ use mongodb::{ Database, }; use mongodb_agent_common::{ - mutation::Mutation, query::serialization::bson_to_json, state::ConnectorState, + mongo_query_plan::MongoConfiguration, + procedure::Procedure, + query::{response::type_for_nested_field, serialization::bson_to_json}, + state::ConnectorState, }; +use ndc_query_plan::type_annotated_nested_field; use ndc_sdk::{ connector::MutationError, json_response::JsonResponse, models::{ - Field, MutationOperation, MutationOperationResults, MutationRequest, MutationResponse, - NestedArray, NestedField, NestedObject, Relationship, + self as ndc, MutationOperation, MutationOperationResults, MutationRequest, 
+ MutationResponse, NestedField, NestedObject, }, }; -use crate::{ - api_type_conversions::QueryContext, - query_response::{extend_configured_object_types, prune_type_to_field_selection}, -}; - pub async fn handle_mutation_request( - config: &Configuration, - query_context: QueryContext<'_>, + config: &MongoConfiguration, state: &ConnectorState, mutation_request: MutationRequest, ) -> Result, MutationError> { tracing::debug!(?config, mutation_request = %serde_json::to_string(&mutation_request).unwrap(), "executing mutation"); let database = state.database(); - let jobs = look_up_mutations(config, &mutation_request)?; - let operation_results = try_join_all(jobs.into_iter().map(|(mutation, requested_fields)| { - execute_mutation( - &query_context, + let jobs = look_up_procedures(config, &mutation_request)?; + let operation_results = try_join_all(jobs.into_iter().map(|(procedure, requested_fields)| { + execute_procedure( + config, + &mutation_request, database.clone(), - &mutation_request.collection_relationships, - mutation, + procedure, requested_fields, ) })) @@ -46,13 +41,13 @@ pub async fn handle_mutation_request( Ok(JsonResponse::Value(MutationResponse { operation_results })) } -/// Looks up mutations according to the names given in the mutation request, and pairs them with -/// arguments and requested fields. Returns an error if any mutations cannot be found. -fn look_up_mutations<'a, 'b>( - config: &'a Configuration, +/// Looks up procedures according to the names given in the mutation request, and pairs them with +/// arguments and requested fields. Returns an error if any procedures cannot be found. 
+fn look_up_procedures<'a, 'b>( + config: &'a MongoConfiguration, mutation_request: &'b MutationRequest, -) -> Result, Option<&'b NestedField>)>, MutationError> { - let (mutations, not_found): (Vec<_>, Vec) = mutation_request +) -> Result, Option<&'b NestedField>)>, MutationError> { + let (procedures, not_found): (Vec<_>, Vec) = mutation_request .operations .iter() .map(|operation| match operation { @@ -61,11 +56,11 @@ fn look_up_mutations<'a, 'b>( arguments, fields, } => { - let native_mutation = config.native_mutations.get(name); - let mutation = native_mutation.ok_or(name).map(|native_mutation| { - Mutation::from_native_mutation(native_mutation, arguments.clone()) + let native_mutation = config.native_mutations().get(name); + let procedure = native_mutation.ok_or(name).map(|native_mutation| { + Procedure::from_native_mutation(native_mutation, arguments.clone()) })?; - Ok((mutation, fields.as_ref())) + Ok((procedure, fields.as_ref())) } }) .partition_result(); @@ -77,34 +72,38 @@ fn look_up_mutations<'a, 'b>( ))); } - Ok(mutations) + Ok(procedures) } -async fn execute_mutation( - query_context: &QueryContext<'_>, +async fn execute_procedure( + config: &MongoConfiguration, + mutation_request: &MutationRequest, database: Database, - relationships: &BTreeMap, - mutation: Mutation<'_>, + procedure: Procedure<'_>, requested_fields: Option<&NestedField>, ) -> Result { - let (result, result_type) = mutation - .execute(&query_context.object_types, database.clone()) + let (result, result_type) = procedure + .execute(database.clone()) .await .map_err(|err| MutationError::UnprocessableContent(err.to_string()))?; let rewritten_result = rewrite_response(requested_fields, result.into())?; - let (requested_result_type, temp_object_types) = prune_type_to_field_selection( - query_context, - relationships, - &[], - &result_type, - requested_fields, - ) - .map_err(|err| MutationError::Other(Box::new(err)))?; - let object_types = extend_configured_object_types(query_context, 
temp_object_types); + let requested_result_type = if let Some(fields) = requested_fields { + let plan_field = type_annotated_nested_field( + config, + &mutation_request.collection_relationships, + &result_type, + fields.clone(), + ) + .map_err(|err| MutationError::UnprocessableContent(err.to_string()))?; + type_for_nested_field(&[], &result_type, &plan_field) + .map_err(|err| MutationError::UnprocessableContent(err.to_string()))? + } else { + result_type + }; - let json_result = bson_to_json(&requested_result_type, &object_types, rewritten_result) + let json_result = bson_to_json(&requested_result_type, rewritten_result) .map_err(|err| MutationError::UnprocessableContent(err.to_string()))?; Ok(MutationOperationResults::Procedure { @@ -146,7 +145,7 @@ fn rewrite_doc( .iter() .map(|(name, field)| { let field_value = match field { - Field::Column { column, fields } => { + ndc::Field::Column { column, fields } => { let orig_value = doc.remove(column).ok_or_else(|| { MutationError::UnprocessableContent(format!( "missing expected field from response: {name}" @@ -154,7 +153,7 @@ fn rewrite_doc( })?; rewrite_response(fields.as_ref(), orig_value) } - Field::Relationship { .. } => Err(MutationError::UnsupportedOperation( + ndc::Field::Relationship { .. 
} => Err(MutationError::UnsupportedOperation( "The MongoDB connector does not support relationship references in mutations" .to_owned(), )), @@ -165,7 +164,7 @@ fn rewrite_doc( .try_collect() } -fn rewrite_array(fields: &NestedArray, values: Vec) -> Result, MutationError> { +fn rewrite_array(fields: &ndc::NestedArray, values: Vec) -> Result, MutationError> { let nested = &fields.fields; values .into_iter() diff --git a/crates/mongodb-connector/src/query_context.rs b/crates/mongodb-connector/src/query_context.rs deleted file mode 100644 index 9ab3ac08..00000000 --- a/crates/mongodb-connector/src/query_context.rs +++ /dev/null @@ -1,14 +0,0 @@ -use std::borrow::Cow; - -use crate::{api_type_conversions::QueryContext, schema::SCALAR_TYPES}; -use configuration::Configuration; - -/// Produce a query context from the connector configuration to direct query request processing -pub fn get_query_context(configuration: &Configuration) -> QueryContext<'_> { - QueryContext { - collections: Cow::Borrowed(&configuration.collections), - functions: Cow::Borrowed(&configuration.functions), - object_types: Cow::Borrowed(&configuration.object_types), - scalar_types: Cow::Borrowed(&SCALAR_TYPES), - } -} diff --git a/crates/mongodb-connector/src/query_response.rs b/crates/mongodb-connector/src/query_response.rs deleted file mode 100644 index 6ece4aa7..00000000 --- a/crates/mongodb-connector/src/query_response.rs +++ /dev/null @@ -1,957 +0,0 @@ -use std::{borrow::Cow, collections::BTreeMap}; - -use configuration::schema::{ObjectField, ObjectType, Type}; -use indexmap::IndexMap; -use itertools::Itertools; -use mongodb::bson::{self, Bson}; -use mongodb_agent_common::query::serialization::{bson_to_json, BsonToJsonError}; -use ndc_sdk::models::{ - self as ndc, Aggregate, Field, NestedField, NestedObject, Query, QueryRequest, QueryResponse, - Relationship, RowFieldValue, RowSet, -}; -use serde::Deserialize; -use thiserror::Error; - -use crate::api_type_conversions::{ConversionError, 
QueryContext}; - -const GEN_OBJECT_TYPE_PREFIX: &str = "__query__"; - -#[derive(Debug, Error)] -pub enum QueryResponseError { - #[error("expected aggregates to be an object at path {}", path.join("."))] - AggregatesNotObject { path: Vec }, - - #[error("{0}")] - BsonDeserialization(#[from] bson::de::Error), - - #[error("{0}")] - BsonToJson(#[from] BsonToJsonError), - - #[error("{0}")] - Conversion(#[from] ConversionError), - - #[error("expected an array at path {}", path.join("."))] - ExpectedArray { path: Vec }, - - #[error("expected an object at path {}", path.join("."))] - ExpectedObject { path: Vec }, - - #[error("expected a single response document from MongoDB, but did not get one")] - ExpectedSingleDocument, -} - -type ObjectTypes = Vec<(String, ObjectType)>; -type Result = std::result::Result; - -// These structs describe possible shapes of data returned by MongoDB query plans - -#[derive(Debug, Deserialize)] -struct ResponsesForVariableSets { - row_sets: Vec>, -} - -#[derive(Debug, Deserialize)] -struct BsonRowSet { - #[serde(default)] - aggregates: Bson, - #[serde(default)] - rows: Vec, -} - -pub fn serialize_query_response( - query_context: &QueryContext<'_>, - query_request: &QueryRequest, - response_documents: Vec, -) -> Result { - tracing::debug!(response_documents = %serde_json::to_string(&response_documents).unwrap(), "response from MongoDB"); - - let collection_info = query_context.find_collection(&query_request.collection)?; - let collection_name = &collection_info.name; - - // If the query request specified variable sets then we should have gotten a single document - // from MongoDB with fields for multiple sets of results - one for each set of variables. 
- let row_sets = if query_request.variables.is_some() { - let responses: ResponsesForVariableSets = parse_single_document(response_documents)?; - responses - .row_sets - .into_iter() - .map(|docs| { - serialize_row_set( - query_context, - &query_request.collection_relationships, - &[collection_name], - collection_name, - &query_request.query, - docs, - ) - }) - .try_collect() - } else { - Ok(vec![serialize_row_set( - query_context, - &query_request.collection_relationships, - &[], - collection_name, - &query_request.query, - response_documents, - )?]) - }?; - let response = QueryResponse(row_sets); - tracing::debug!(query_response = %serde_json::to_string(&response).unwrap()); - Ok(response) -} - -fn serialize_row_set( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - collection_name: &str, - query: &Query, - docs: Vec, -) -> Result { - if !has_aggregates(query) { - // When there are no aggregates we expect a list of rows - let rows = query - .fields - .as_ref() - .map(|fields| { - serialize_rows( - query_context, - relationships, - path, - collection_name, - fields, - docs, - ) - }) - .transpose()?; - - Ok(RowSet { - aggregates: None, - rows, - }) - } else { - // When there are aggregates we expect a single document with `rows` and `aggregates` - // fields - let row_set: BsonRowSet = parse_single_document(docs)?; - - let aggregates = query - .aggregates - .as_ref() - .map(|aggregates| { - serialize_aggregates(query_context, path, aggregates, row_set.aggregates) - }) - .transpose()?; - - let rows = query - .fields - .as_ref() - .map(|fields| { - serialize_rows( - query_context, - relationships, - path, - collection_name, - fields, - row_set.rows, - ) - }) - .transpose()?; - - Ok(RowSet { aggregates, rows }) - } -} - -fn serialize_aggregates( - query_context: &QueryContext<'_>, - path: &[&str], - _query_aggregates: &IndexMap, - value: Bson, -) -> Result> { - let (aggregates_type, temp_object_types) = type_for_aggregates()?; - - let 
object_types = extend_configured_object_types(query_context, temp_object_types); - - let json = bson_to_json(&aggregates_type, &object_types, value)?; - - // The NDC type uses an IndexMap for aggregate values; we need to convert the map - // underlying the Value::Object value to an IndexMap - let aggregate_values = match json { - serde_json::Value::Object(obj) => obj.into_iter().collect(), - _ => Err(QueryResponseError::AggregatesNotObject { - path: path_to_owned(path), - })?, - }; - Ok(aggregate_values) -} - -fn serialize_rows( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - collection_name: &str, - query_fields: &IndexMap, - docs: Vec, -) -> Result>> { - let (row_type, temp_object_types) = type_for_row( - query_context, - relationships, - path, - collection_name, - query_fields, - )?; - - let object_types = extend_configured_object_types(query_context, temp_object_types); - - docs.into_iter() - .map(|doc| { - let json = bson_to_json(&row_type, &object_types, doc.into())?; - // The NDC types use an IndexMap for each row value; we need to convert the map - // underlying the Value::Object value to an IndexMap - let index_map = match json { - serde_json::Value::Object(obj) => obj - .into_iter() - .map(|(key, value)| (key, RowFieldValue(value))) - .collect(), - _ => unreachable!(), - }; - Ok(index_map) - }) - .try_collect() -} - -fn type_for_row_set( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - collection_name: &str, - query: &Query, -) -> Result<(Type, ObjectTypes)> { - let mut fields = BTreeMap::new(); - let mut object_types = vec![]; - - if has_aggregates(query) { - let (aggregates_type, nested_object_types) = type_for_aggregates()?; - fields.insert( - "aggregates".to_owned(), - ObjectField { - r#type: aggregates_type, - description: Default::default(), - }, - ); - object_types.extend(nested_object_types); - } - - if let Some(query_fields) = &query.fields { - let (row_type, 
nested_object_types) = type_for_row( - query_context, - relationships, - path, - collection_name, - query_fields, - )?; - fields.insert( - "rows".to_owned(), - ObjectField { - r#type: Type::ArrayOf(Box::new(row_type)), - description: Default::default(), - }, - ); - object_types.extend(nested_object_types); - } - - let (row_set_type_name, row_set_type) = named_type(path, "row_set"); - let object_type = ObjectType { - description: Default::default(), - fields, - }; - object_types.push((row_set_type_name, object_type)); - - Ok((row_set_type, object_types)) -} - -// TODO: infer response type for aggregates MDB-130 -fn type_for_aggregates() -> Result<(Type, ObjectTypes)> { - Ok((Type::ExtendedJSON, Default::default())) -} - -fn type_for_row( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - collection_name: &str, - query_fields: &IndexMap, -) -> Result<(Type, ObjectTypes)> { - let mut object_types = vec![]; - - let fields = query_fields - .iter() - .map(|(field_name, field_definition)| { - let (field_type, nested_object_types) = type_for_field( - query_context, - relationships, - &append_to_path(path, [field_name.as_ref()]), - collection_name, - field_definition, - )?; - object_types.extend(nested_object_types); - Ok(( - field_name.clone(), - ObjectField { - description: Default::default(), - r#type: field_type, - }, - )) - }) - .try_collect::<_, _, QueryResponseError>()?; - - let (row_type_name, row_type) = named_type(path, "row"); - let object_type = ObjectType { - description: Default::default(), - fields, - }; - object_types.push((row_type_name, object_type)); - - Ok((row_type, object_types)) -} - -fn type_for_field( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - collection_name: &str, - field_definition: &ndc::Field, -) -> Result<(Type, ObjectTypes)> { - match field_definition { - ndc::Field::Column { column, fields } => { - let field_type = find_field_type(query_context, path, collection_name, 
column)?; - - let (requested_type, temp_object_types) = prune_type_to_field_selection( - query_context, - relationships, - path, - field_type, - fields.as_ref(), - )?; - - Ok((requested_type, temp_object_types)) - } - - ndc::Field::Relationship { - query, - relationship, - .. - } => { - let (requested_type, temp_object_types) = - type_for_relation_field(query_context, relationships, path, query, relationship)?; - - Ok((requested_type, temp_object_types)) - } - } -} - -fn find_field_type<'a>( - query_context: &'a QueryContext<'a>, - path: &[&str], - collection_name: &str, - column: &str, -) -> Result<&'a Type> { - let object_type = query_context.find_collection_object_type(collection_name)?; - let field_type = object_type.value.fields.get(column).ok_or_else(|| { - ConversionError::UnknownObjectTypeField { - object_type: object_type.name.to_string(), - field_name: column.to_string(), - path: path_to_owned(path), - } - })?; - Ok(&field_type.r#type) -} - -/// Computes a new hierarchy of object types (if necessary) that select a subset of fields from -/// existing object types to match the fields requested by the query. Recurses into nested objects, -/// arrays, and nullable type references. -/// -/// Scalar types are returned without modification. -/// -/// Returns a reference to the pruned type, and a list of newly-computed object types with -/// generated names. 
-pub fn prune_type_to_field_selection( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - input_type: &Type, - fields: Option<&NestedField>, -) -> Result<(Type, Vec<(String, ObjectType)>)> { - match (input_type, fields) { - (t, None) => Ok((t.clone(), Default::default())), - (t @ Type::Scalar(_) | t @ Type::ExtendedJSON, _) => Ok((t.clone(), Default::default())), - - (Type::Nullable(t), _) => { - let (underlying_type, object_types) = - prune_type_to_field_selection(query_context, relationships, path, t, fields)?; - Ok((Type::Nullable(Box::new(underlying_type)), object_types)) - } - (Type::ArrayOf(t), Some(NestedField::Array(nested))) => { - let (element_type, object_types) = prune_type_to_field_selection( - query_context, - relationships, - path, - t, - Some(&nested.fields), - )?; - Ok((Type::ArrayOf(Box::new(element_type)), object_types)) - } - (Type::Object(t), Some(NestedField::Object(nested))) => { - object_type_for_field_subset(query_context, relationships, path, t, nested) - } - - (_, Some(NestedField::Array(_))) => Err(QueryResponseError::ExpectedArray { - path: path_to_owned(path), - }), - (_, Some(NestedField::Object(_))) => Err(QueryResponseError::ExpectedObject { - path: path_to_owned(path), - }), - } -} - -/// We have a configured object type for a collection, or for a nested object in a collection. But -/// the query may request a subset of fields from that object type. We need to compute a new object -/// type for that requested subset. -/// -/// Returns a reference to the newly-generated object type, and a list of all new object types with -/// generated names including the newly-generated object type, and types for any nested objects. 
-fn object_type_for_field_subset( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - object_type_name: &str, - requested_fields: &NestedObject, -) -> Result<(Type, Vec<(String, ObjectType)>)> { - let object_type = query_context.find_object_type(object_type_name)?.value; - let (fields, object_type_sets): (_, Vec>) = requested_fields - .fields - .iter() - .map(|(name, requested_field)| { - let (object_field, object_types) = requested_field_definition( - query_context, - relationships, - &append_to_path(path, [name.as_ref()]), - object_type_name, - object_type, - requested_field, - )?; - Ok(((name.clone(), object_field), object_types)) - }) - .process_results::<_, _, QueryResponseError, _>(|iter| iter.unzip())?; - - let pruned_object_type = ObjectType { - fields, - description: None, - }; - let (pruned_object_type_name, pruned_type) = named_type(path, "fields"); - - let mut object_types: Vec<(String, ObjectType)> = - object_type_sets.into_iter().flatten().collect(); - object_types.push((pruned_object_type_name, pruned_object_type)); - - Ok((pruned_type, object_types)) -} - -/// Given an object type for a value, and a requested field from that value, produce an updated -/// object field definition to match the request. This must take into account aliasing where the -/// name of the requested field maps to a different name on the underlying type. 
-fn requested_field_definition( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - object_type_name: &str, - object_type: &ObjectType, - requested_field: &Field, -) -> Result<(ObjectField, Vec<(String, ObjectType)>)> { - match requested_field { - Field::Column { column, fields } => { - let field_def = object_type.fields.get(column).ok_or_else(|| { - ConversionError::UnknownObjectTypeField { - object_type: object_type_name.to_owned(), - field_name: column.to_owned(), - path: path_to_owned(path), - } - })?; - let (field_type, object_types) = prune_type_to_field_selection( - query_context, - relationships, - path, - &field_def.r#type, - fields.as_ref(), - )?; - let pruned_field = ObjectField { - r#type: field_type, - description: None, - }; - Ok((pruned_field, object_types)) - } - Field::Relationship { - query, - relationship, - .. - } => { - let (relation_type, temp_object_types) = - type_for_relation_field(query_context, relationships, path, query, relationship)?; - let relation_field = ObjectField { - r#type: relation_type, - description: None, - }; - Ok((relation_field, temp_object_types)) - } - } -} - -fn type_for_relation_field( - query_context: &QueryContext<'_>, - relationships: &BTreeMap, - path: &[&str], - query: &Query, - relationship: &str, -) -> Result<(Type, Vec<(String, ObjectType)>)> { - let relationship_def = - relationships - .get(relationship) - .ok_or_else(|| ConversionError::UnknownRelationship { - relationship_name: relationship.to_owned(), - path: path_to_owned(path), - })?; - type_for_row_set( - query_context, - relationships, - path, - &relationship_def.target_collection, - query, - ) -} - -pub fn extend_configured_object_types<'a>( - query_context: &QueryContext<'a>, - object_types: ObjectTypes, -) -> Cow<'a, BTreeMap> { - if object_types.is_empty() { - // We're cloning a Cow, not a BTreeMap here. 
In production that will be a [Cow::Borrowed] - // variant so effectively that means we're cloning a wide pointer - query_context.object_types.clone() - } else { - // This time we're cloning the BTreeMap - let mut extended_object_types = query_context.object_types.clone().into_owned(); - extended_object_types.extend(object_types); - Cow::Owned(extended_object_types) - } -} - -fn parse_single_document(documents: Vec) -> Result -where - T: for<'de> serde::Deserialize<'de>, -{ - let document = documents - .into_iter() - .next() - .ok_or(QueryResponseError::ExpectedSingleDocument)?; - let value = bson::from_document(document)?; - Ok(value) -} - -fn has_aggregates(query: &Query) -> bool { - match &query.aggregates { - Some(aggregates) => !aggregates.is_empty(), - None => false, - } -} - -fn append_to_path<'a>(path: &[&'a str], elems: impl IntoIterator) -> Vec<&'a str> { - path.iter().copied().chain(elems).collect() -} - -fn path_to_owned(path: &[&str]) -> Vec { - path.iter().map(|x| (*x).to_owned()).collect() -} - -fn named_type(path: &[&str], name_suffix: &str) -> (String, Type) { - let name = format!( - "{GEN_OBJECT_TYPE_PREFIX}{}_{name_suffix}", - path.iter().join("_") - ); - let t = Type::Object(name.clone()); - (name, t) -} - -#[cfg(test)] -mod tests { - use std::{borrow::Cow, collections::BTreeMap, str::FromStr}; - - use configuration::schema::{ObjectType, Type}; - use mongodb::bson::{self, Bson}; - use mongodb_support::BsonScalarType; - use ndc_sdk::models::{QueryRequest, QueryResponse, RowFieldValue, RowSet}; - use ndc_test_helpers::{ - array, collection, field, object, query, query_request, relation_field, relationship, - }; - use pretty_assertions::assert_eq; - use serde_json::json; - - use crate::{ - api_type_conversions::QueryContext, - test_helpers::{make_nested_schema, make_scalar_types, object_type}, - }; - - use super::{serialize_query_response, type_for_row_set}; - - #[test] - fn serializes_response_with_nested_fields() -> anyhow::Result<()> { - let 
query_context = make_nested_schema(); - let request = query_request() - .collection("authors") - .query(query().fields([field!("address" => "address", object!([ - field!("street"), - field!("geocode" => "geocode", object!([ - field!("longitude"), - ])), - ]))])) - .into(); - - let response_documents = vec![bson::doc! { - "address": { - "street": "137 Maple Dr", - "geocode": { - "longitude": 122.4194, - }, - }, - }]; - - let response = serialize_query_response(&query_context, &request, response_documents)?; - assert_eq!( - response, - QueryResponse(vec![RowSet { - aggregates: Default::default(), - rows: Some(vec![[( - "address".into(), - RowFieldValue(json!({ - "street": "137 Maple Dr", - "geocode": { - "longitude": 122.4194, - }, - })) - )] - .into()]), - }]) - ); - Ok(()) - } - - #[test] - fn serializes_response_with_nested_object_inside_array() -> anyhow::Result<()> { - let query_context = make_nested_schema(); - let request = query_request() - .collection("authors") - .query(query().fields([field!("articles" => "articles", array!( - object!([ - field!("title"), - ]) - ))])) - .into(); - - let response_documents = vec![bson::doc! 
{ - "articles": [ - { "title": "Modeling MongoDB with relational model" }, - { "title": "NoSQL databases: MongoDB vs cassandra" }, - ], - }]; - - let response = serialize_query_response(&query_context, &request, response_documents)?; - assert_eq!( - response, - QueryResponse(vec![RowSet { - aggregates: Default::default(), - rows: Some(vec![[( - "articles".into(), - RowFieldValue(json!([ - { "title": "Modeling MongoDB with relational model" }, - { "title": "NoSQL databases: MongoDB vs cassandra" }, - ])) - )] - .into()]), - }]) - ); - Ok(()) - } - - #[test] - fn serializes_response_with_aliased_fields() -> anyhow::Result<()> { - let query_context = make_nested_schema(); - let request = query_request() - .collection("authors") - .query(query().fields([ - field!("address1" => "address", object!([ - field!("line1" => "street"), - ])), - field!("address2" => "address", object!([ - field!("latlong" => "geocode", object!([ - field!("long" => "longitude"), - ])), - ])), - ])) - .into(); - - let response_documents = vec![bson::doc! 
{ - "address1": { - "line1": "137 Maple Dr", - }, - "address2": { - "latlong": { - "long": 122.4194, - }, - }, - }]; - - let response = serialize_query_response(&query_context, &request, response_documents)?; - assert_eq!( - response, - QueryResponse(vec![RowSet { - aggregates: Default::default(), - rows: Some(vec![[ - ( - "address1".into(), - RowFieldValue(json!({ - "line1": "137 Maple Dr", - })) - ), - ( - "address2".into(), - RowFieldValue(json!({ - "latlong": { - "long": 122.4194, - }, - })) - ) - ] - .into()]), - }]) - ); - Ok(()) - } - - #[test] - fn serializes_response_with_decimal_128_fields() -> anyhow::Result<()> { - let query_context = QueryContext { - collections: Cow::Owned([collection("business")].into()), - functions: Default::default(), - object_types: Cow::Owned( - [( - "business".to_owned(), - object_type([ - ("price", Type::Scalar(BsonScalarType::Decimal)), - ("price_extjson", Type::ExtendedJSON), - ]), - )] - .into(), - ), - scalar_types: Cow::Owned(make_scalar_types()), - }; - - let request = query_request() - .collection("business") - .query(query().fields([field!("price"), field!("price_extjson")])) - .into(); - - let response_documents = vec![bson::doc! 
{ - "price": Bson::Decimal128(bson::Decimal128::from_str("127.6486654").unwrap()), - "price_extjson": Bson::Decimal128(bson::Decimal128::from_str("-4.9999999999").unwrap()), - }]; - - let response = serialize_query_response(&query_context, &request, response_documents)?; - assert_eq!( - response, - QueryResponse(vec![RowSet { - aggregates: Default::default(), - rows: Some(vec![[ - ("price".into(), RowFieldValue(json!("127.6486654"))), - ( - "price_extjson".into(), - RowFieldValue(json!({ - "$numberDecimal": "-4.9999999999" - })) - ), - ] - .into()]), - }]) - ); - Ok(()) - } - - #[test] - fn serializes_response_with_nested_extjson() -> anyhow::Result<()> { - let query_context = QueryContext { - collections: Cow::Owned([collection("data")].into()), - functions: Default::default(), - object_types: Cow::Owned( - [( - "data".to_owned(), - object_type([("value", Type::ExtendedJSON)]), - )] - .into(), - ), - scalar_types: Cow::Owned(make_scalar_types()), - }; - - let request = query_request() - .collection("data") - .query(query().fields([field!("value")])) - .into(); - - let response_documents = vec![bson::doc! 
{ - "value": { - "array": [ - { "number": Bson::Int32(3) }, - { "number": Bson::Decimal128(bson::Decimal128::from_str("127.6486654").unwrap()) }, - ], - "string": "hello", - "object": { - "foo": 1, - "bar": 2, - }, - }, - }]; - - let response = serialize_query_response(&query_context, &request, response_documents)?; - assert_eq!( - response, - QueryResponse(vec![RowSet { - aggregates: Default::default(), - rows: Some(vec![[( - "value".into(), - RowFieldValue(json!({ - "array": [ - { "number": { "$numberInt": "3" } }, - { "number": { "$numberDecimal": "127.6486654" } }, - ], - "string": "hello", - "object": { - "foo": { "$numberInt": "1" }, - "bar": { "$numberInt": "2" }, - }, - })) - )] - .into()]), - }]) - ); - Ok(()) - } - - #[test] - fn uses_field_path_to_guarantee_distinct_type_names() -> anyhow::Result<()> { - let query_context = make_nested_schema(); - let collection_name = "appearances"; - let request: QueryRequest = query_request() - .collection(collection_name) - .relationships([("author", relationship("authors", [("authorId", "id")]))]) - .query( - query().fields([relation_field!("author" => "presenter", query().fields([ - field!("addr" => "address", object!([ - field!("street"), - field!("geocode" => "geocode", object!([ - field!("latitude"), - field!("long" => "longitude"), - ])) - ])), - field!("articles" => "articles", array!(object!([ - field!("article_title" => "title") - ]))), - ]))]), - ) - .into(); - let path = [collection_name]; - - let (row_set_type, object_types) = type_for_row_set( - &query_context, - &request.collection_relationships, - &path, - collection_name, - &request.query, - )?; - - // Convert object types into a map so we can compare without worrying about order - let object_types: BTreeMap = object_types.into_iter().collect(); - - assert_eq!( - (row_set_type, object_types), - ( - Type::Object("__query__appearances_row_set".to_owned()), - [ - ( - "__query__appearances_row_set".to_owned(), - object_type([( - "rows".to_owned(), - 
Type::ArrayOf(Box::new(Type::Object( - "__query__appearances_row".to_owned() - ))) - )]), - ), - ( - "__query__appearances_row".to_owned(), - object_type([( - "presenter".to_owned(), - Type::Object("__query__appearances_presenter_row_set".to_owned()) - )]), - ), - ( - "__query__appearances_presenter_row_set".to_owned(), - object_type([( - "rows", - Type::ArrayOf(Box::new(Type::Object( - "__query__appearances_presenter_row".to_owned() - ))) - )]), - ), - ( - "__query__appearances_presenter_row".to_owned(), - object_type([ - ( - "addr", - Type::Object( - "__query__appearances_presenter_addr_fields".to_owned() - ) - ), - ( - "articles", - Type::ArrayOf(Box::new(Type::Object( - "__query__appearances_presenter_articles_fields".to_owned() - ))) - ), - ]), - ), - ( - "__query__appearances_presenter_addr_fields".to_owned(), - object_type([ - ( - "geocode", - Type::Nullable(Box::new(Type::Object( - "__query__appearances_presenter_addr_geocode_fields".to_owned() - ))) - ), - ("street", Type::Scalar(BsonScalarType::String)), - ]), - ), - ( - "__query__appearances_presenter_addr_geocode_fields".to_owned(), - object_type([ - ("latitude", Type::Scalar(BsonScalarType::Double)), - ("long", Type::Scalar(BsonScalarType::Double)), - ]), - ), - ( - "__query__appearances_presenter_articles_fields".to_owned(), - object_type([("article_title", Type::Scalar(BsonScalarType::String))]), - ), - ] - .into() - ) - ); - Ok(()) - } -} diff --git a/crates/mongodb-connector/src/schema.rs b/crates/mongodb-connector/src/schema.rs index 727fd807..d24c8d5e 100644 --- a/crates/mongodb-connector/src/schema.rs +++ b/crates/mongodb-connector/src/schema.rs @@ -1,24 +1,23 @@ -use lazy_static::lazy_static; -use std::collections::BTreeMap; - -use configuration::Configuration; +use mongodb_agent_common::{ + mongo_query_plan::MongoConfiguration, scalar_types_capabilities::SCALAR_TYPES, +}; +use ndc_query_plan::QueryContext as _; use ndc_sdk::{connector::SchemaError, models as ndc}; -use crate::capabilities; - 
-lazy_static! { - pub static ref SCALAR_TYPES: BTreeMap = capabilities::scalar_types(); -} - -pub async fn get_schema(config: &Configuration) -> Result { +pub async fn get_schema(config: &MongoConfiguration) -> Result { Ok(ndc::SchemaResponse { - collections: config.collections.values().cloned().collect(), - functions: config.functions.values().map(|(f, _)| f).cloned().collect(), - procedures: config.mutations.values().cloned().collect(), + collections: config.collections().values().cloned().collect(), + functions: config + .functions() + .values() + .map(|(f, _)| f) + .cloned() + .collect(), + procedures: config.procedures().values().cloned().collect(), object_types: config - .object_types + .object_types() .iter() - .map(|(name, object_type)| (name.clone(), object_type.clone().into())) + .map(|(name, object_type)| (name.clone(), object_type.clone())) .collect(), scalar_types: SCALAR_TYPES.clone(), }) diff --git a/crates/mongodb-connector/src/test_helpers.rs b/crates/mongodb-connector/src/test_helpers.rs deleted file mode 100644 index 4c9a9918..00000000 --- a/crates/mongodb-connector/src/test_helpers.rs +++ /dev/null @@ -1,293 +0,0 @@ -use std::{borrow::Cow, collections::BTreeMap}; - -use configuration::schema; -use mongodb_support::BsonScalarType; -use ndc_sdk::models::{ - AggregateFunctionDefinition, CollectionInfo, ComparisonOperatorDefinition, ScalarType, Type, - TypeRepresentation, -}; -use ndc_test_helpers::{collection, make_primary_key_uniqueness_constraint}; - -use crate::api_type_conversions::QueryContext; - -pub fn object_type( - fields: impl IntoIterator)>, -) -> schema::ObjectType { - schema::ObjectType { - description: Default::default(), - fields: fields - .into_iter() - .map(|(name, field_type)| { - ( - name.to_string(), - schema::ObjectField { - description: Default::default(), - r#type: field_type.into(), - }, - ) - }) - .collect(), - } -} - -pub fn make_scalar_types() -> BTreeMap { - BTreeMap::from([ - ( - "String".to_owned(), - ScalarType { - 
representation: Some(TypeRepresentation::String), - aggregate_functions: Default::default(), - comparison_operators: BTreeMap::from([ - ("_eq".to_owned(), ComparisonOperatorDefinition::Equal), - ( - "_regex".to_owned(), - ComparisonOperatorDefinition::Custom { - argument_type: Type::Named { - name: "String".to_owned(), - }, - }, - ), - ]), - }, - ), - ( - "Int".to_owned(), - ScalarType { - representation: Some(TypeRepresentation::Int32), - aggregate_functions: BTreeMap::from([( - "avg".into(), - AggregateFunctionDefinition { - result_type: Type::Named { - name: "Float".into(), // Different result type to the input scalar type - }, - }, - )]), - comparison_operators: BTreeMap::from([( - "_eq".to_owned(), - ComparisonOperatorDefinition::Equal, - )]), - }, - ), - ]) -} - -pub fn make_flat_schema() -> QueryContext<'static> { - QueryContext { - collections: Cow::Owned(BTreeMap::from([ - ( - "authors".into(), - CollectionInfo { - name: "authors".to_owned(), - description: None, - collection_type: "Author".into(), - arguments: Default::default(), - uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), - foreign_keys: Default::default(), - }, - ), - ( - "articles".into(), - CollectionInfo { - name: "articles".to_owned(), - description: None, - collection_type: "Article".into(), - arguments: Default::default(), - uniqueness_constraints: make_primary_key_uniqueness_constraint("articles"), - foreign_keys: Default::default(), - }, - ), - ])), - functions: Default::default(), - object_types: Cow::Owned(BTreeMap::from([ - ( - "Author".into(), - schema::ObjectType { - description: None, - fields: BTreeMap::from([ - ( - "id".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::Int), - }, - ), - ( - "last_name".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::String), - }, - ), - ]), - }, - ), - ( - "Article".into(), - schema::ObjectType { - description: None, - 
fields: BTreeMap::from([ - ( - "author_id".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::Int), - }, - ), - ( - "title".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::String), - }, - ), - ( - "year".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Nullable(Box::new(schema::Type::Scalar( - BsonScalarType::Int, - ))), - }, - ), - ]), - }, - ), - ])), - scalar_types: Cow::Owned(make_scalar_types()), - } -} - -pub fn make_nested_schema() -> QueryContext<'static> { - QueryContext { - collections: Cow::Owned(BTreeMap::from([ - ( - "authors".into(), - CollectionInfo { - name: "authors".into(), - description: None, - collection_type: "Author".into(), - arguments: Default::default(), - uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), - foreign_keys: Default::default(), - }, - ), - collection("appearances"), // new helper gives more concise syntax - ])), - functions: Default::default(), - object_types: Cow::Owned(BTreeMap::from([ - ( - "Author".into(), - schema::ObjectType { - description: None, - fields: BTreeMap::from([ - ( - "address".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Object("Address".into()), - }, - ), - ( - "articles".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::ArrayOf(Box::new(schema::Type::Object( - "Article".into(), - ))), - }, - ), - ( - "array_of_arrays".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::ArrayOf(Box::new(schema::Type::ArrayOf( - Box::new(schema::Type::Object("Article".into())), - ))), - }, - ), - ]), - }, - ), - ( - "Address".into(), - schema::ObjectType { - description: None, - fields: BTreeMap::from([ - ( - "country".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::String), - }, - ), - ( - "street".into(), - schema::ObjectField { - 
description: None, - r#type: schema::Type::Scalar(BsonScalarType::String), - }, - ), - ( - "apartment".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Nullable(Box::new(schema::Type::Scalar( - BsonScalarType::String, - ))), - }, - ), - ( - "geocode".into(), - schema::ObjectField { - description: Some("Lat/Long".to_owned()), - r#type: schema::Type::Nullable(Box::new(schema::Type::Object( - "Geocode".to_owned(), - ))), - }, - ), - ]), - }, - ), - ( - "Article".into(), - schema::ObjectType { - description: None, - fields: BTreeMap::from([( - "title".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::String), - }, - )]), - }, - ), - ( - "Geocode".into(), - schema::ObjectType { - description: None, - fields: BTreeMap::from([ - ( - "latitude".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::Double), - }, - ), - ( - "longitude".into(), - schema::ObjectField { - description: None, - r#type: schema::Type::Scalar(BsonScalarType::Double), - }, - ), - ]), - }, - ), - ( - "appearances".to_owned(), - object_type([("authorId", schema::Type::Scalar(BsonScalarType::ObjectId))]), - ), - ])), - scalar_types: Cow::Owned(make_scalar_types()), - } -} diff --git a/crates/mongodb-support/Cargo.toml b/crates/mongodb-support/Cargo.toml index a9a42a92..72ba7436 100644 --- a/crates/mongodb-support/Cargo.toml +++ b/crates/mongodb-support/Cargo.toml @@ -4,9 +4,8 @@ version = "0.1.0" edition = "2021" [dependencies] -dc-api-types = { path = "../dc-api-types" } enum-iterator = "^2.0.0" -indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses +indexmap = { workspace = true } mongodb = { workspace = true } schemars = "^0.8.12" serde = { version = "1", features = ["derive"] } diff --git a/crates/mongodb-support/src/bson_type.rs b/crates/mongodb-support/src/bson_type.rs index f92f70ef..5024a2cf 100644 --- 
a/crates/mongodb-support/src/bson_type.rs +++ b/crates/mongodb-support/src/bson_type.rs @@ -1,4 +1,3 @@ -use dc_api_types::GraphQlType; use enum_iterator::{all, Sequence}; use mongodb::bson::Bson; use schemars::JsonSchema; @@ -141,17 +140,27 @@ impl BsonScalarType { } } - pub fn graphql_name(self) -> String { - capitalize(self.bson_name()) - } - - pub fn graphql_type(self) -> Option { + pub fn graphql_name(self) -> &'static str { match self { - S::Double => Some(GraphQlType::Float), - S::String => Some(GraphQlType::String), - S::Int => Some(GraphQlType::Int), - S::Bool => Some(GraphQlType::Boolean), - _ => None, + S::Double => "Double", + S::Decimal => "Decimal", + S::Int => "Int", + S::Long => "Long", + S::String => "String", + S::Date => "Date", + S::Timestamp => "Timestamp", + S::BinData => "BinData", + S::ObjectId => "ObjectId", + S::Bool => "Bool", + S::Null => "Null", + S::Regex => "Regex", + S::Javascript => "Javascript", + S::JavascriptWithScope => "JavascriptWithScope", + S::MinKey => "MinKey", + S::MaxKey => "MaxKey", + S::Undefined => "Undefined", + S::DbPointer => "DbPointer", + S::Symbol => "Symbol", } } @@ -288,15 +297,6 @@ impl TryFrom for BsonScalarType { } } -/// Capitalizes the first character in s. 
-fn capitalize(s: &str) -> String { - let mut c = s.chars(); - match c.next() { - None => String::new(), - Some(f) => f.to_uppercase().collect::() + c.as_str(), - } -} - #[cfg(test)] mod tests { use crate::BsonScalarType; diff --git a/crates/ndc-query-plan/Cargo.toml b/crates/ndc-query-plan/Cargo.toml new file mode 100644 index 00000000..06ec0331 --- /dev/null +++ b/crates/ndc-query-plan/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "ndc-query-plan" +version = "0.1.0" +edition = "2021" + +[dependencies] +derivative = "2" +indexmap = { workspace = true } +itertools = { workspace = true } +ndc-models = { workspace = true } +nonempty = "^0.10" +serde_json = "1" +thiserror = "1" + +[dev-dependencies] +ndc-test-helpers = { path = "../ndc-test-helpers" } + +anyhow = "1" +enum-iterator = "2" +lazy_static = "1" +pretty_assertions = "1" diff --git a/crates/ndc-query-plan/src/lib.rs b/crates/ndc-query-plan/src/lib.rs new file mode 100644 index 00000000..032382cb --- /dev/null +++ b/crates/ndc-query-plan/src/lib.rs @@ -0,0 +1,17 @@ +mod plan_for_query_request; +mod query_plan; +mod type_system; + +pub use plan_for_query_request::{ + plan_for_query_request, + query_context::QueryContext, + query_plan_error::QueryPlanError, + type_annotated_field::{type_annotated_field, type_annotated_nested_field}, +}; +pub use query_plan::{ + Aggregate, AggregateFunctionDefinition, ComparisonOperatorDefinition, ComparisonTarget, + ComparisonValue, ConnectorTypes, ExistsInCollection, Expression, Field, NestedArray, + NestedField, NestedObject, OrderBy, OrderByElement, OrderByTarget, Query, QueryPlan, + Relationship, Relationships, VariableSet, +}; +pub use type_system::{inline_object_types, ObjectType, Type}; diff --git a/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs b/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs new file mode 100644 index 00000000..27c6d832 --- /dev/null +++ b/crates/ndc-query-plan/src/plan_for_query_request/helpers.rs @@ -0,0 +1,30 @@ +use 
std::collections::BTreeMap; + +use ndc_models as ndc; +use crate as plan; + +use super::query_plan_error::QueryPlanError; + +type Result = std::result::Result; + +pub fn find_object_field<'a, S>( + object_type: &'a plan::ObjectType, + field_name: &str, +) -> Result<&'a plan::Type> { + object_type.fields.get(field_name).ok_or_else(|| { + QueryPlanError::UnknownObjectTypeField { + object_type: object_type.name.clone(), + field_name: field_name.to_string(), + path: Default::default(), // TODO: set a path for more helpful error reporting + } + }) +} + +pub fn lookup_relationship<'a>( + relationships: &'a BTreeMap, + relationship: &str, +) -> Result<&'a ndc::Relationship> { + relationships + .get(relationship) + .ok_or_else(|| QueryPlanError::UnspecifiedRelation(relationship.to_owned())) +} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/mod.rs b/crates/ndc-query-plan/src/plan_for_query_request/mod.rs new file mode 100644 index 00000000..2f72869d --- /dev/null +++ b/crates/ndc-query-plan/src/plan_for_query_request/mod.rs @@ -0,0 +1,1434 @@ +mod helpers; +pub mod query_context; +pub mod query_plan_error; +mod query_plan_state; +pub mod type_annotated_field; + +#[cfg(test)] +mod plan_test_helpers; + +use std::collections::VecDeque; + +use crate::{self as plan, type_annotated_field, ObjectType, QueryPlan}; +use indexmap::IndexMap; +use itertools::Itertools as _; +use ndc::QueryRequest; +use ndc_models as ndc; + +use self::{ + helpers::{find_object_field, lookup_relationship}, + query_context::QueryContext, + query_plan_error::QueryPlanError, + query_plan_state::QueryPlanState, +}; + +type Result = std::result::Result; + +pub fn plan_for_query_request( + context: &T, + request: QueryRequest, +) -> Result> { + let mut plan_state = QueryPlanState::new(context, &request.collection_relationships); + let collection_object_type = context.find_collection_object_type(&request.collection)?; + + let query = plan_for_query( + &mut plan_state, + &collection_object_type, + 
&collection_object_type, + request.query, + )?; + + let unrelated_collections = plan_state.into_unrelated_collections(); + + Ok(QueryPlan { + collection: request.collection, + arguments: request.arguments, + query, + variables: request.variables, + unrelated_collections, + }) +} + +pub fn plan_for_query( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + collection_object_type: &plan::ObjectType, + query: ndc::Query, +) -> Result> { + let mut plan_state = plan_state.state_for_subquery(); + + let aggregates = + plan_for_aggregates(plan_state.context, collection_object_type, query.aggregates)?; + let fields = plan_for_fields( + &mut plan_state, + root_collection_object_type, + collection_object_type, + query.fields, + )?; + + let order_by = query + .order_by + .map(|order_by| { + plan_for_order_by( + &mut plan_state, + root_collection_object_type, + collection_object_type, + order_by, + ) + }) + .transpose()?; + + let limit = query.limit; + let offset = query.offset; + + let predicate = query + .predicate + .map(|expr| { + plan_for_expression( + &mut plan_state, + root_collection_object_type, + collection_object_type, + expr, + ) + }) + .transpose()?; + + Ok(plan::Query { + aggregates, + aggregates_limit: limit, + fields, + order_by, + limit, + offset, + predicate, + relationships: plan_state.into_relationships(), + }) +} + +fn plan_for_aggregates( + context: &T, + collection_object_type: &plan::ObjectType, + ndc_aggregates: Option>, +) -> Result>>> { + ndc_aggregates + .map(|aggregates| -> Result<_> { + aggregates + .into_iter() + .map(|(name, aggregate)| { + Ok(( + name, + plan_for_aggregate(context, collection_object_type, aggregate)?, + )) + }) + .collect() + }) + .transpose() +} + +fn plan_for_aggregate( + context: &T, + collection_object_type: &plan::ObjectType, + aggregate: ndc::Aggregate, +) -> Result> { + match aggregate { + ndc::Aggregate::ColumnCount { column, distinct } => { + Ok(plan::Aggregate::ColumnCount { 
column, distinct }) + } + ndc::Aggregate::SingleColumn { column, function } => { + let object_type_field_type = + find_object_field(collection_object_type, column.as_ref())?; + // let column_scalar_type_name = get_scalar_type_name(&object_type_field.r#type)?; + let (function, definition) = + context.find_aggregation_function_definition(object_type_field_type, &function)?; + Ok(plan::Aggregate::SingleColumn { + column, + function, + result_type: definition.result_type.clone(), + }) + } + ndc::Aggregate::StarCount {} => Ok(plan::Aggregate::StarCount {}), + } +} + +fn plan_for_fields( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + collection_object_type: &plan::ObjectType, + ndc_fields: Option>, +) -> Result>>> { + let plan_fields: Option>> = ndc_fields + .map(|fields| { + fields + .into_iter() + .map(|(name, field)| { + Ok(( + name, + type_annotated_field( + plan_state, + root_collection_object_type, + collection_object_type, + field, + )?, + )) + }) + .collect::>() + }) + .transpose()?; + Ok(plan_fields) +} + +fn plan_for_order_by( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + order_by: ndc::OrderBy, +) -> Result> { + let elements = order_by + .elements + .into_iter() + .map(|element| { + plan_for_order_by_element( + plan_state, + root_collection_object_type, + object_type, + element, + ) + }) + .try_collect()?; + Ok(plan::OrderBy { elements }) +} + +fn plan_for_order_by_element( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + element: ndc::OrderByElement, +) -> Result> { + let target = match element.target { + ndc::OrderByTarget::Column { name, path } => plan::OrderByTarget::Column { + name, + field_path: Default::default(), // TODO: propagate this after ndc-spec update + path: plan_for_relationship_path( + plan_state, + root_collection_object_type, + 
object_type, + path, + )? + .0, + }, + ndc::OrderByTarget::SingleColumnAggregate { + column, + function, + path, + } => { + let (plan_path, target_object_type) = plan_for_relationship_path( + plan_state, + root_collection_object_type, + object_type, + path, + )?; + let column_type = find_object_field(&target_object_type, &column)?; + let (function, function_definition) = plan_state + .context + .find_aggregation_function_definition(column_type, &function)?; + + plan::OrderByTarget::SingleColumnAggregate { + column, + function, + result_type: function_definition.result_type.clone(), + path: plan_path, + } + } + ndc::OrderByTarget::StarCountAggregate { path } => { + let (plan_path, _) = plan_for_relationship_path( + plan_state, + root_collection_object_type, + object_type, + path, + )?; + plan::OrderByTarget::StarCountAggregate { path: plan_path } + } + }; + + Ok(plan::OrderByElement { + order_direction: element.order_direction, + target, + }) +} + +/// Returns list of aliases for joins to traverse, plus the object type of the final collection in +/// the path. 
+fn plan_for_relationship_path( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + relationship_path: Vec, +) -> Result<(Vec, ObjectType)> { + let end_of_relationship_path_object_type = relationship_path + .last() + .map(|last_path_element| { + let relationship = lookup_relationship( + plan_state.collection_relationships, + &last_path_element.relationship, + )?; + plan_state + .context + .find_collection_object_type(&relationship.target_collection) + }) + .transpose()?; + let target_object_type = end_of_relationship_path_object_type.unwrap_or(object_type.clone()); + + let vec_deque = plan_for_relationship_path_helper( + plan_state, + root_collection_object_type, + relationship_path, + )?; + let aliases = vec_deque.into_iter().collect(); + + Ok((aliases, target_object_type)) +} + +fn plan_for_relationship_path_helper( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + relationship_path: impl IntoIterator, +) -> Result> { + let (head, tail) = { + let mut path_iter = relationship_path.into_iter(); + let head = path_iter.next(); + (head, path_iter) + }; + if let Some(ndc::PathElement { + relationship, + arguments, + predicate, + }) = head + { + let relationship_def = + lookup_relationship(plan_state.collection_relationships, &relationship)?; + let related_collection_type = plan_state + .context + .find_collection_object_type(&relationship_def.target_collection)?; + let mut nested_state = plan_state.state_for_subquery(); + + let mut rest_path = plan_for_relationship_path_helper( + &mut nested_state, + root_collection_object_type, + tail, + )?; + + let nested_relationships = nested_state.into_relationships(); + + let relationship_query = plan::Query { + predicate: predicate + .map(|p| { + plan_for_expression( + plan_state, + root_collection_object_type, + &related_collection_type, + *p, + ) + }) + .transpose()?, + relationships: nested_relationships, + 
..Default::default() + }; + + let (relation_key, _) = + plan_state.register_relationship(relationship, arguments, relationship_query)?; + + rest_path.push_front(relation_key.to_owned()); + Ok(rest_path) + } else { + Ok(VecDeque::new()) + } +} + +fn plan_for_expression( + plan_state: &mut QueryPlanState, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + expression: ndc::Expression, +) -> Result> { + match expression { + ndc::Expression::And { expressions } => Ok(plan::Expression::And { + expressions: expressions + .into_iter() + .map(|expr| { + plan_for_expression(plan_state, root_collection_object_type, object_type, expr) + }) + .collect::>()?, + }), + ndc::Expression::Or { expressions } => Ok(plan::Expression::Or { + expressions: expressions + .into_iter() + .map(|expr| { + plan_for_expression(plan_state, root_collection_object_type, object_type, expr) + }) + .collect::>()?, + }), + ndc::Expression::Not { expression } => Ok(plan::Expression::Not { + expression: Box::new(plan_for_expression( + plan_state, + root_collection_object_type, + object_type, + *expression, + )?), + }), + ndc::Expression::UnaryComparisonOperator { column, operator } => { + Ok(plan::Expression::UnaryComparisonOperator { + column: plan_for_comparison_target( + plan_state, + root_collection_object_type, + object_type, + column, + )?, + operator: match operator { + ndc::UnaryComparisonOperator::IsNull => ndc::UnaryComparisonOperator::IsNull, + }, + }) + } + ndc::Expression::BinaryComparisonOperator { + column, + operator, + value, + } => plan_for_binary_comparison( + plan_state, + root_collection_object_type, + object_type, + column, + operator, + value, + ), + ndc::Expression::Exists { + in_collection, + predicate, + } => { + let mut nested_state = plan_state.state_for_subquery(); + + let (in_collection, predicate) = match in_collection { + ndc::ExistsInCollection::Related { + relationship, + arguments, + } => { + let ndc_relationship = + 
lookup_relationship(plan_state.collection_relationships, &relationship)?; + let collection_object_type = plan_state + .context + .find_collection_object_type(&ndc_relationship.target_collection)?; + + let predicate = predicate + .map(|expression| { + plan_for_expression( + &mut nested_state, + root_collection_object_type, + &collection_object_type, + *expression, + ) + }) + .transpose()?; + + let relationship_query = plan::Query { + predicate: predicate.clone(), + relationships: nested_state.into_relationships(), + ..Default::default() + }; + + let (relationship_key, _) = plan_state.register_relationship( + relationship, + arguments, + relationship_query, + )?; + + let in_collection = plan::ExistsInCollection::Related { + relationship: relationship_key.to_owned(), + }; + + Ok((in_collection, predicate)) + } + ndc::ExistsInCollection::Unrelated { + collection, + arguments, + } => { + let collection_object_type = plan_state + .context + .find_collection_object_type(&collection)?; + + let predicate = predicate + .map(|expression| { + plan_for_expression( + &mut nested_state, + root_collection_object_type, + &collection_object_type, + *expression, + ) + }) + .transpose()?; + + let join_query = plan::Query { + predicate: predicate.clone(), + relationships: nested_state.into_relationships(), + ..Default::default() + }; + + let join_key = + plan_state.register_unrelated_join(collection, arguments, join_query); + + let in_collection = plan::ExistsInCollection::Unrelated { + unrelated_collection: join_key, + }; + Ok((in_collection, predicate)) + } + }?; + + Ok(plan::Expression::Exists { + in_collection, + predicate: predicate.map(Box::new), + }) + } + } +} + +fn plan_for_binary_comparison( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + column: ndc::ComparisonTarget, + operator: String, + value: ndc::ComparisonValue, +) -> Result> { + let comparison_target = + 
plan_for_comparison_target(plan_state, root_collection_object_type, object_type, column)?; + let (operator, operator_definition) = plan_state + .context + .find_comparison_operator(comparison_target.get_column_type(), &operator)?; + let value_type = match operator_definition { + plan::ComparisonOperatorDefinition::Equal => comparison_target.get_column_type().clone(), + plan::ComparisonOperatorDefinition::In => { + plan::Type::ArrayOf(Box::new(comparison_target.get_column_type().clone())) + } + plan::ComparisonOperatorDefinition::Custom { argument_type } => argument_type.clone(), + }; + Ok(plan::Expression::BinaryComparisonOperator { + operator, + value: plan_for_comparison_value( + plan_state, + root_collection_object_type, + object_type, + value_type, + value, + )?, + column: comparison_target, + }) +} + +fn plan_for_comparison_target( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + target: ndc::ComparisonTarget, +) -> Result> { + match target { + ndc::ComparisonTarget::Column { name, path } => { + let (path, target_object_type) = plan_for_relationship_path( + plan_state, + root_collection_object_type, + object_type, + path, + )?; + let column_type = find_object_field(&target_object_type, &name)?.clone(); + Ok(plan::ComparisonTarget::Column { + name, + field_path: Default::default(), // TODO: propagate this after ndc-spec update + path, + column_type, + }) + } + ndc::ComparisonTarget::RootCollectionColumn { name } => { + let column_type = find_object_field(root_collection_object_type, &name)?.clone(); + Ok(plan::ComparisonTarget::RootCollectionColumn { + name, + field_path: Default::default(), // TODO: propagate this after ndc-spec update + column_type, + }) + } + } +} + +fn plan_for_comparison_value( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &plan::ObjectType, + object_type: &plan::ObjectType, + expected_type: plan::Type, + value: ndc::ComparisonValue, 
+) -> Result> { + match value { + ndc::ComparisonValue::Column { column } => Ok(plan::ComparisonValue::Column { + column: plan_for_comparison_target( + plan_state, + root_collection_object_type, + object_type, + column, + )?, + }), + ndc::ComparisonValue::Scalar { value } => Ok(plan::ComparisonValue::Scalar { + value, + value_type: expected_type, + }), + ndc::ComparisonValue::Variable { name } => Ok(plan::ComparisonValue::Variable { + name, + variable_type: expected_type, + }), + } +} + +#[cfg(test)] +mod tests { + use ndc_models::{self as ndc, OrderByTarget, OrderDirection, RelationshipType}; + use ndc_test_helpers::*; + use pretty_assertions::assert_eq; + use serde_json::json; + + use crate::{ + self as plan, + plan_for_query_request::plan_test_helpers::{ + self, make_flat_schema, make_nested_schema, TestContext, + }, + query_plan::UnrelatedJoin, + ExistsInCollection, Expression, Field, OrderBy, Query, QueryContext, QueryPlan, + Relationship, + }; + + use super::plan_for_query_request; + + #[test] + fn translates_query_request_relationships() -> Result<(), anyhow::Error> { + let request = query_request() + .collection("schools") + .relationships([ + ( + "school_classes", + relationship("classes", [("_id", "school_id")]), + ), + ( + "class_students", + relationship("students", [("_id", "class_id")]), + ), + ( + "class_department", + relationship("departments", [("department_id", "_id")]).object_type(), + ), + ( + "school_directory", + relationship("directory", [("_id", "school_id")]).object_type(), + ), + ( + "student_advisor", + relationship("advisors", [("advisor_id", "_id")]).object_type(), + ), + ( + "existence_check", + relationship("some_collection", [("some_id", "_id")]), + ), + ]) + .query( + query() + .fields([relation_field!("class_name" => "school_classes", query() + .fields([ + relation_field!("student_name" => "class_students") + ]) + )]) + .order_by(vec![ndc::OrderByElement { + order_direction: OrderDirection::Asc, + target: OrderByTarget::Column { + 
name: "advisor_name".to_owned(), + path: vec![ + path_element("school_classes") + .predicate(binop( + "Equal", + target!( + "_id", + relations: [ + path_element("school_classes"), + path_element("class_department"), + ], + ), + column_value!( + "math_department_id", + relations: [path_element("school_directory")], + ), + )) + .into(), + path_element("class_students").into(), + path_element("student_advisor").into(), + ], + }, + }]) + // The `And` layer checks that we properly recursive into Expressions + .predicate(and([ndc::Expression::Exists { + in_collection: related!("existence_check"), + predicate: None, + }])), + ) + .into(); + + let expected = QueryPlan { + collection: "schools".to_owned(), + arguments: Default::default(), + variables: None, + unrelated_collections: Default::default(), + query: Query { + predicate: Some(Expression::And { + expressions: vec![Expression::Exists { + in_collection: ExistsInCollection::Related { + relationship: "existence_check".into(), + }, + predicate: None, + }], + }), + order_by: Some(OrderBy { + elements: [plan::OrderByElement { + order_direction: OrderDirection::Asc, + target: plan::OrderByTarget::Column { + name: "advisor_name".into(), + field_path: Default::default(), + path: [ + "school_classes".into(), + "class_students".into(), + "student_advisor".into(), + ] + .into(), + }, + }] + .into(), + }), + relationships: [ + ( + "school_classes".to_owned(), + Relationship { + column_mapping: [("_id".to_owned(), "school_id".to_owned())].into(), + relationship_type: RelationshipType::Array, + target_collection: "classes".to_owned(), + arguments: Default::default(), + query: Query { + fields: Some( + [( + "student_name".into(), + plan::Field::Relationship { + relationship: "class_students".into(), + aggregates: None, + fields: None, + }, + )] + .into(), + ), + relationships: [( + "class_students".into(), + plan::Relationship { + target_collection: "students".into(), + column_mapping: [("_id".into(), "class_id".into())].into(), + 
relationship_type: RelationshipType::Array, + arguments: Default::default(), + query: Default::default(), + }, + )] + .into(), + ..Default::default() + }, + }, + ), + ( + "school_directory".to_owned(), + Relationship { + target_collection: "directory".to_owned(), + column_mapping: [("_id".to_owned(), "school_id".to_owned())].into(), + relationship_type: RelationshipType::Object, + arguments: Default::default(), + query: Query { + ..Default::default() + }, + }, + ), + ( + "existence_check".to_owned(), + Relationship { + column_mapping: [("some_id".to_owned(), "_id".to_owned())].into(), + relationship_type: RelationshipType::Array, + target_collection: "some_collection".to_owned(), + arguments: Default::default(), + query: Query { + predicate: None, + ..Default::default() + }, + }, + ), + ] + .into(), + fields: Some( + [( + "class_name".into(), + Field::Relationship { + relationship: "school_classes".into(), + aggregates: None, + fields: Some( + [( + "student_name".into(), + Field::Relationship { + relationship: "class_students".into(), + aggregates: None, + fields: None, + }, + )] + .into(), + ), + }, + )] + .into(), + ), + ..Default::default() + }, + }; + + let context = TestContext { + collections: [ + collection("schools"), + collection("classes"), + collection("students"), + collection("departments"), + collection("directory"), + collection("advisors"), + collection("some_collection"), + ] + .into(), + object_types: [ + ( + "schools".to_owned(), + object_type([("_id", named_type("Int"))]), + ), + ( + "classes".to_owned(), + object_type([ + ("_id", named_type("Int")), + ("school_id", named_type("Int")), + ("department_id", named_type("Int")), + ]), + ), + ( + "students".to_owned(), + object_type([ + ("_id", named_type("Int")), + ("class_id", named_type("Int")), + ("advisor_id", named_type("Int")), + ("student_name", named_type("String")), + ]), + ), + ( + "departments".to_owned(), + object_type([("_id", named_type("Int"))]), + ), + ( + "directory".to_owned(), + 
object_type([ + ("_id", named_type("Int")), + ("school_id", named_type("Int")), + ("math_department_id", named_type("Int")), + ]), + ), + ( + "advisors".to_owned(), + object_type([ + ("_id", named_type("Int")), + ("advisor_name", named_type("String")), + ]), + ), + ( + "some_collection".to_owned(), + object_type([("_id", named_type("Int")), ("some_id", named_type("Int"))]), + ), + ] + .into(), + ..Default::default() + }; + + let query_plan = plan_for_query_request(&context, request)?; + + assert_eq!(query_plan, expected); + Ok(()) + } + + #[test] + fn translates_root_column_references() -> Result<(), anyhow::Error> { + let query_context = make_flat_schema(); + let query = query_request() + .collection("authors") + .query(query().fields([field!("last_name")]).predicate(exists( + unrelated!("articles"), + and([ + binop("Equal", target!("author_id"), column_value!(root("id"))), + binop("Regex", target!("title"), value!("Functional.*")), + ]), + ))) + .into(); + let query_plan = plan_for_query_request(&query_context, query)?; + + let expected = QueryPlan { + collection: "authors".into(), + query: plan::Query { + predicate: Some(plan::Expression::Exists { + in_collection: plan::ExistsInCollection::Unrelated { + unrelated_collection: "__join_articles_0".into(), + }, + predicate: Some(Box::new(plan::Expression::And { + expressions: vec![ + plan::Expression::BinaryComparisonOperator { + column: plan::ComparisonTarget::Column { + name: "author_id".into(), + field_path: Default::default(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Int, + ), + path: Default::default(), + }, + operator: plan_test_helpers::ComparisonOperator::Equal, + value: plan::ComparisonValue::Column { + column: plan::ComparisonTarget::RootCollectionColumn { + name: "id".into(), + field_path: Default::default(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Int, + ), + }, + }, + }, + plan::Expression::BinaryComparisonOperator { + column: 
plan::ComparisonTarget::Column { + name: "title".into(), + field_path: Default::default(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + path: Default::default(), + }, + operator: plan_test_helpers::ComparisonOperator::Regex, + value: plan::ComparisonValue::Scalar { + value: json!("Functional.*"), + value_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + }, + }, + ], + })), + }), + fields: Some( + [( + "last_name".into(), + plan::Field::Column { + column: "last_name".into(), + fields: None, + column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), + }, + )] + .into(), + ), + ..Default::default() + }, + unrelated_collections: [( + "__join_articles_0".into(), + UnrelatedJoin { + target_collection: "articles".into(), + arguments: Default::default(), + query: plan::Query { + predicate: Some(plan::Expression::And { + expressions: vec![ + plan::Expression::BinaryComparisonOperator { + column: plan::ComparisonTarget::Column { + name: "author_id".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Int, + ), + field_path: None, + path: vec![], + }, + operator: plan_test_helpers::ComparisonOperator::Equal, + value: plan::ComparisonValue::Column { + column: plan::ComparisonTarget::RootCollectionColumn { + name: "id".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Int, + ), + field_path: None, + }, + }, + }, + plan::Expression::BinaryComparisonOperator { + column: plan::ComparisonTarget::Column { + name: "title".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + field_path: None, + path: vec![], + }, + operator: plan_test_helpers::ComparisonOperator::Regex, + value: plan::ComparisonValue::Scalar { + value: "Functional.*".into(), + value_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + }, + }, + ], + }), + ..Default::default() + }, + }, + )] + .into(), + arguments: Default::default(), + 
variables: Default::default(), + }; + + assert_eq!(query_plan, expected); + Ok(()) + } + + #[test] + fn translates_aggregate_selections() -> Result<(), anyhow::Error> { + let query_context = make_flat_schema(); + let query = query_request() + .collection("authors") + .query(query().aggregates([ + star_count_aggregate!("count_star"), + column_count_aggregate!("count_id" => "last_name", distinct: true), + column_aggregate!("avg_id" => "id", "Average"), + ])) + .into(); + let query_plan = plan_for_query_request(&query_context, query)?; + + let expected = QueryPlan { + collection: "authors".into(), + query: plan::Query { + aggregates: Some( + [ + ("count_star".into(), plan::Aggregate::StarCount), + ( + "count_id".into(), + plan::Aggregate::ColumnCount { + column: "last_name".into(), + distinct: true, + }, + ), + ( + "avg_id".into(), + plan::Aggregate::SingleColumn { + column: "id".into(), + function: plan_test_helpers::AggregateFunction::Average, + result_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Double, + ), + }, + ), + ] + .into(), + ), + ..Default::default() + }, + arguments: Default::default(), + variables: Default::default(), + unrelated_collections: Default::default(), + }; + + assert_eq!(query_plan, expected); + Ok(()) + } + + #[test] + fn translates_relationships_in_fields_predicates_and_orderings() -> Result<(), anyhow::Error> { + let query_context = make_flat_schema(); + let query = query_request() + .collection("authors") + .query( + query() + .fields([ + field!("last_name"), + relation_field!( + "articles" => "author_articles", + query().fields([field!("title"), field!("year")]) + ), + ]) + .predicate(exists( + related!("author_articles"), + binop("Regex", target!("title"), value!("Functional.*")), + )) + .order_by(vec![ + ndc::OrderByElement { + order_direction: OrderDirection::Asc, + target: OrderByTarget::SingleColumnAggregate { + column: "year".into(), + function: "Average".into(), + path: vec![path_element("author_articles").into()], + 
}, + }, + ndc::OrderByElement { + order_direction: OrderDirection::Desc, + target: OrderByTarget::Column { + name: "id".into(), + path: vec![], + }, + }, + ]), + ) + .relationships([( + "author_articles", + relationship("articles", [("id", "author_id")]), + )]) + .into(); + let query_plan = plan_for_query_request(&query_context, query)?; + + let expected = QueryPlan { + collection: "authors".into(), + query: plan::Query { + predicate: Some(plan::Expression::Exists { + in_collection: plan::ExistsInCollection::Related { + relationship: "author_articles".into(), + }, + predicate: Some(Box::new(plan::Expression::BinaryComparisonOperator { + column: plan::ComparisonTarget::Column { + name: "title".into(), + field_path: Default::default(), + column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), + path: Default::default(), + }, + operator: plan_test_helpers::ComparisonOperator::Regex, + value: plan::ComparisonValue::Scalar { + value: "Functional.*".into(), + value_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), + }, + })), + }), + order_by: Some(plan::OrderBy { + elements: vec![ + plan::OrderByElement { + order_direction: OrderDirection::Asc, + target: plan::OrderByTarget::SingleColumnAggregate { + column: "year".into(), + function: plan_test_helpers::AggregateFunction::Average, + result_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::Double, + ), + path: vec!["author_articles".into()], + }, + }, + plan::OrderByElement { + order_direction: OrderDirection::Desc, + target: plan::OrderByTarget::Column { + name: "id".into(), + field_path: None, + path: vec![], + }, + }, + ], + }), + fields: Some( + [ + ( + "last_name".into(), + plan::Field::Column { + column: "last_name".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + fields: None, + }, + ), + ( + "articles".into(), + plan::Field::Relationship { + relationship: "author_articles".into(), + aggregates: None, + fields: Some( + [ + ( + 
"title".into(), + plan::Field::Column { + column: "title".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + fields: None, + }, + ), + ( + "year".into(), + plan::Field::Column { + column: "year".into(), + column_type: plan::Type::Nullable(Box::new( + plan::Type::Scalar( + plan_test_helpers::ScalarType::Int, + ), + )), + fields: None, + }, + ), + ] + .into(), + ), + }, + ), + ] + .into(), + ), + relationships: [( + "author_articles".into(), + plan::Relationship { + target_collection: "articles".into(), + column_mapping: [("id".into(), "author_id".into())].into(), + relationship_type: RelationshipType::Array, + arguments: Default::default(), + query: plan::Query { + fields: Some( + [ + ( + "title".into(), + plan::Field::Column { + column: "title".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + fields: None, + }, + ), + ( + "year".into(), + plan::Field::Column { + column: "year".into(), + column_type: plan::Type::Nullable(Box::new( + plan::Type::Scalar( + plan_test_helpers::ScalarType::Int, + ), + )), + fields: None, + }, + ), + ] + .into(), + ), + ..Default::default() + }, + }, + )] + .into(), + ..Default::default() + }, + arguments: Default::default(), + variables: Default::default(), + unrelated_collections: Default::default(), + }; + + assert_eq!(query_plan, expected); + Ok(()) + } + + #[test] + fn translates_nested_fields() -> Result<(), anyhow::Error> { + let query_context = make_nested_schema(); + let query_request = query_request() + .collection("authors") + .query(query().fields([ + field!("author_address" => "address", object!([field!("address_country" => "country")])), + field!("author_articles" => "articles", array!(object!([field!("article_title" => "title")]))), + field!("author_array_of_arrays" => "array_of_arrays", array!(array!(object!([field!("article_title" => "title")])))) + ])) + .into(); + let query_plan = plan_for_query_request(&query_context, query_request)?; + + 
let expected = QueryPlan { + collection: "authors".into(), + query: plan::Query { + fields: Some( + [ + ( + "author_address".into(), + plan::Field::Column { + column: "address".into(), + column_type: plan::Type::Object( + query_context.find_object_type("Address")?, + ), + fields: Some(plan::NestedField::Object(plan::NestedObject { + fields: [( + "address_country".into(), + plan::Field::Column { + column: "country".into(), + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + fields: None, + }, + )] + .into(), + })), + }, + ), + ( + "author_articles".into(), + plan::Field::Column { + column: "articles".into(), + column_type: plan::Type::ArrayOf(Box::new(plan::Type::Object( + query_context.find_object_type("Article")?, + ))), + fields: Some(plan::NestedField::Array(plan::NestedArray { + fields: Box::new(plan::NestedField::Object( + plan::NestedObject { + fields: [( + "article_title".into(), + plan::Field::Column { + column: "title".into(), + fields: None, + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + }, + )] + .into(), + }, + )), + })), + }, + ), + ( + "author_array_of_arrays".into(), + plan::Field::Column { + column: "array_of_arrays".into(), + fields: Some(plan::NestedField::Array(plan::NestedArray { + fields: Box::new(plan::NestedField::Array(plan::NestedArray { + fields: Box::new(plan::NestedField::Object( + plan::NestedObject { + fields: [( + "article_title".into(), + plan::Field::Column { + column: "title".into(), + fields: None, + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + }, + )] + .into(), + }, + )), + })), + })), + column_type: plan::Type::ArrayOf(Box::new(plan::Type::ArrayOf( + Box::new(plan::Type::Object( + query_context.find_object_type("Article")?, + )), + ))), + }, + ), + ] + .into(), + ), + ..Default::default() + }, + arguments: Default::default(), + variables: Default::default(), + unrelated_collections: Default::default(), + }; + + 
assert_eq!(query_plan, expected); + Ok(()) + } + + #[test] + fn translates_predicate_referencing_field_of_related_collection() -> anyhow::Result<()> { + let query_context = make_nested_schema(); + let request = query_request() + .collection("appearances") + .relationships([("author", relationship("authors", [("authorId", "id")]))]) + .query( + query() + .fields([relation_field!("presenter" => "author", query().fields([ + field!("name"), + ]))]) + .predicate(not(is_null( + target!("name", relations: [path_element("author")]), + ))), + ) + .into(); + let query_plan = plan_for_query_request(&query_context, request)?; + + let expected = QueryPlan { + collection: "appearances".into(), + query: plan::Query { + predicate: Some(plan::Expression::Not { + expression: Box::new(plan::Expression::UnaryComparisonOperator { + column: plan::ComparisonTarget::Column { + name: "name".into(), + field_path: None, + column_type: plan::Type::Scalar(plan_test_helpers::ScalarType::String), + path: vec!["author".into()], + }, + operator: ndc_models::UnaryComparisonOperator::IsNull, + }), + }), + fields: Some( + [( + "presenter".into(), + plan::Field::Relationship { + relationship: "author".into(), + aggregates: None, + fields: Some( + [( + "name".into(), + plan::Field::Column { + column: "name".into(), + fields: None, + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + }, + )] + .into(), + ), + }, + )] + .into(), + ), + relationships: [( + "author".into(), + plan::Relationship { + column_mapping: [("authorId".into(), "id".into())].into(), + relationship_type: RelationshipType::Array, + target_collection: "authors".into(), + arguments: Default::default(), + query: plan::Query { + fields: Some( + [( + "name".into(), + plan::Field::Column { + column: "name".into(), + fields: None, + column_type: plan::Type::Scalar( + plan_test_helpers::ScalarType::String, + ), + }, + )] + .into(), + ), + ..Default::default() + }, + }, + )] + .into(), + ..Default::default() + }, 
+ arguments: Default::default(), + variables: Default::default(), + unrelated_collections: Default::default(), + }; + + assert_eq!(query_plan, expected); + Ok(()) + } +} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers.rs b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers.rs new file mode 100644 index 00000000..9fce920a --- /dev/null +++ b/crates/ndc-query-plan/src/plan_for_query_request/plan_test_helpers.rs @@ -0,0 +1,328 @@ +use std::{collections::BTreeMap, fmt::Display}; + +use enum_iterator::Sequence; +use lazy_static::lazy_static; +use ndc::TypeRepresentation; +use ndc_models as ndc; +use ndc_test_helpers::{ + array_of, collection, make_primary_key_uniqueness_constraint, named_type, nullable, object_type, +}; + +use crate::{ConnectorTypes, QueryContext, QueryPlanError, Type}; + +#[derive(Clone, Debug, Default)] +pub struct TestContext { + pub collections: BTreeMap, + pub functions: BTreeMap, + pub procedures: BTreeMap, + pub object_types: BTreeMap, +} + +impl ConnectorTypes for TestContext { + type AggregateFunction = AggregateFunction; + type ComparisonOperator = ComparisonOperator; + type ScalarType = ScalarType; +} + +impl QueryContext for TestContext { + fn lookup_scalar_type(type_name: &str) -> Option { + ScalarType::find_by_name(type_name) + } + + fn lookup_aggregation_function( + &self, + input_type: &Type, + function_name: &str, + ) -> Result<(Self::AggregateFunction, &ndc::AggregateFunctionDefinition), QueryPlanError> { + let function = AggregateFunction::find_by_name(function_name).ok_or_else(|| { + QueryPlanError::UnknownAggregateFunction { + aggregate_function: function_name.to_owned(), + } + })?; + let definition = scalar_type_name(input_type) + .and_then(|name| SCALAR_TYPES.get(name)) + .and_then(|scalar_type_def| scalar_type_def.aggregate_functions.get(function_name)) + .ok_or_else(|| QueryPlanError::UnknownAggregateFunction { + aggregate_function: function_name.to_owned(), + })?; + Ok((function, 
definition)) + } + + fn lookup_comparison_operator( + &self, + left_operand_type: &Type, + operator_name: &str, + ) -> Result<(Self::ComparisonOperator, &ndc::ComparisonOperatorDefinition), QueryPlanError> + where + Self: Sized, + { + let operator = ComparisonOperator::find_by_name(operator_name) + .ok_or_else(|| QueryPlanError::UnknownComparisonOperator(operator_name.to_owned()))?; + let definition = scalar_type_name(left_operand_type) + .and_then(|name| SCALAR_TYPES.get(name)) + .and_then(|scalar_type_def| scalar_type_def.comparison_operators.get(operator_name)) + .ok_or_else(|| QueryPlanError::UnknownComparisonOperator(operator_name.to_owned()))?; + Ok((operator, definition)) + } + + fn collections(&self) -> &BTreeMap { + &self.collections + } + + fn functions(&self) -> &BTreeMap { + &self.functions + } + + fn object_types(&self) -> &BTreeMap { + &self.object_types + } + + fn procedures(&self) -> &BTreeMap { + &self.procedures + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Sequence)] +pub enum AggregateFunction { + Average, +} + +impl NamedEnum for AggregateFunction { + fn name(self) -> &'static str { + match self { + AggregateFunction::Average => "Average", + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Sequence)] +pub enum ComparisonOperator { + Equal, + Regex, +} + +impl NamedEnum for ComparisonOperator { + fn name(self) -> &'static str { + match self { + ComparisonOperator::Equal => "Equal", + ComparisonOperator::Regex => "Regex", + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Sequence)] +pub enum ScalarType { + Bool, + Double, + Int, + String, +} + +impl NamedEnum for ScalarType { + fn name(self) -> &'static str { + match self { + ScalarType::Bool => "Bool", + ScalarType::Double => "Double", + ScalarType::Int => "Int", + ScalarType::String => "String", + } + } +} + +impl Display for ScalarType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.name()) + } +} + +trait NamedEnum { + fn name(self) -> 
&'static str; + fn find_by_name(name: &str) -> Option + where + Self: Clone + Sequence, + { + enum_iterator::all::().find(|s| s.clone().name() == name) + } +} + +fn scalar_type_name(t: &Type) -> Option<&'static str> { + match t { + Type::Scalar(s) => Some(s.name()), + Type::Nullable(t) => scalar_type_name(t), + _ => None, + } +} + +fn scalar_types() -> BTreeMap { + [ + ( + ScalarType::Double.name().to_owned(), + ndc::ScalarType { + representation: Some(TypeRepresentation::Float64), + aggregate_functions: [( + AggregateFunction::Average.name().to_owned(), + ndc::AggregateFunctionDefinition { + result_type: ndc::Type::Named { + name: ScalarType::Double.name().to_owned(), + }, + }, + )] + .into(), + comparison_operators: [( + ComparisonOperator::Equal.name().to_owned(), + ndc::ComparisonOperatorDefinition::Equal, + )] + .into(), + }, + ), + ( + ScalarType::Int.name().to_owned(), + ndc::ScalarType { + representation: Some(TypeRepresentation::Int32), + aggregate_functions: [( + AggregateFunction::Average.name().to_owned(), + ndc::AggregateFunctionDefinition { + result_type: ndc::Type::Named { + name: ScalarType::Double.name().to_owned(), + }, + }, + )] + .into(), + comparison_operators: [( + ComparisonOperator::Equal.name().to_owned(), + ndc::ComparisonOperatorDefinition::Equal, + )] + .into(), + }, + ), + ( + ScalarType::String.name().to_owned(), + ndc::ScalarType { + representation: Some(TypeRepresentation::String), + aggregate_functions: Default::default(), + comparison_operators: [ + ( + ComparisonOperator::Equal.name().to_owned(), + ndc::ComparisonOperatorDefinition::Equal, + ), + ( + ComparisonOperator::Regex.name().to_owned(), + ndc::ComparisonOperatorDefinition::Custom { + argument_type: named_type(ScalarType::String), + }, + ), + ] + .into(), + }, + ), + ] + .into() +} + +lazy_static! 
{ + static ref SCALAR_TYPES: BTreeMap = scalar_types(); +} + +pub fn make_flat_schema() -> TestContext { + TestContext { + collections: BTreeMap::from([ + ( + "authors".into(), + ndc::CollectionInfo { + name: "authors".to_owned(), + description: None, + collection_type: "Author".into(), + arguments: Default::default(), + uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), + foreign_keys: Default::default(), + }, + ), + ( + "articles".into(), + ndc::CollectionInfo { + name: "articles".to_owned(), + description: None, + collection_type: "Article".into(), + arguments: Default::default(), + uniqueness_constraints: make_primary_key_uniqueness_constraint("articles"), + foreign_keys: Default::default(), + }, + ), + ]), + functions: Default::default(), + object_types: BTreeMap::from([ + ( + "Author".into(), + object_type([ + ("id", named_type(ScalarType::Int)), + ("last_name", named_type(ScalarType::String)), + ]), + ), + ( + "Article".into(), + object_type([ + ("author_id", named_type(ScalarType::Int)), + ("title", named_type(ScalarType::String)), + ("year", nullable(named_type(ScalarType::Int))), + ]), + ), + ]), + procedures: Default::default(), + } +} + +pub fn make_nested_schema() -> TestContext { + TestContext { + collections: BTreeMap::from([ + ( + "authors".into(), + ndc::CollectionInfo { + name: "authors".into(), + description: None, + collection_type: "Author".into(), + arguments: Default::default(), + uniqueness_constraints: make_primary_key_uniqueness_constraint("authors"), + foreign_keys: Default::default(), + }, + ), + collection("appearances"), // new helper gives more concise syntax + ]), + functions: Default::default(), + object_types: BTreeMap::from([ + ( + "Author".to_owned(), + object_type([ + ("name", named_type(ScalarType::String)), + ("address", named_type("Address")), + ("articles", array_of(named_type("Article"))), + ("array_of_arrays", array_of(array_of(named_type("Article")))), + ]), + ), + ( + "Address".into(), + 
object_type([ + ("country", named_type(ScalarType::String)), + ("street", named_type(ScalarType::String)), + ("apartment", nullable(named_type(ScalarType::String))), + ("geocode", nullable(named_type("Geocode"))), + ]), + ), + ( + "Article".into(), + object_type([("title", named_type(ScalarType::String))]), + ), + ( + "Geocode".into(), + object_type([ + ("latitude", named_type(ScalarType::Double)), + ("longitude", named_type(ScalarType::Double)), + ]), + ), + ( + "appearances".to_owned(), + object_type([("authorId", named_type(ScalarType::Int))]), + ), + ]), + procedures: Default::default(), + } +} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs b/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs new file mode 100644 index 00000000..43336e85 --- /dev/null +++ b/crates/ndc-query-plan/src/plan_for_query_request/query_context.rs @@ -0,0 +1,127 @@ +use std::collections::BTreeMap; + +use ndc_models as ndc; + +use crate::type_system::lookup_object_type; +use crate::{self as plan, inline_object_types}; +use crate::{ConnectorTypes, Type}; + +use super::query_plan_error::QueryPlanError; + +type Result = std::result::Result; + +/// Necessary information to produce a [plan::QueryPlan] from an [ndc::QueryRequest] +pub trait QueryContext: ConnectorTypes { + /* Required methods */ + + /// Get the specific scalar type for this connector by name if the given name is a scalar type + /// name. (This method will also be called for object type names in which case it should return + /// `None`.) 
+ fn lookup_scalar_type(type_name: &str) -> Option; + + fn lookup_aggregation_function( + &self, + input_type: &Type, + function_name: &str, + ) -> Result<(Self::AggregateFunction, &ndc::AggregateFunctionDefinition)>; + + fn lookup_comparison_operator( + &self, + left_operand_type: &Type, + operator_name: &str, + ) -> Result<(Self::ComparisonOperator, &ndc::ComparisonOperatorDefinition)>; + + fn collections(&self) -> &BTreeMap; + fn functions(&self) -> &BTreeMap; + fn object_types(&self) -> &BTreeMap; + fn procedures(&self) -> &BTreeMap; + + /* Provided methods */ + + fn find_aggregation_function_definition( + &self, + input_type: &Type, + function_name: &str, + ) -> Result<( + Self::AggregateFunction, + plan::AggregateFunctionDefinition, + )> + where + Self: Sized, + { + let (func, definition) = + Self::lookup_aggregation_function(self, input_type, function_name)?; + Ok(( + func, + plan::AggregateFunctionDefinition { + result_type: self.ndc_to_plan_type(&definition.result_type)?, + }, + )) + } + + fn find_comparison_operator( + &self, + left_operand_type: &Type, + op_name: &str, + ) -> Result<( + Self::ComparisonOperator, + plan::ComparisonOperatorDefinition, + )> + where + Self: Sized, + { + let (operator, definition) = + Self::lookup_comparison_operator(self, left_operand_type, op_name)?; + let plan_def = match definition { + ndc::ComparisonOperatorDefinition::Equal => plan::ComparisonOperatorDefinition::Equal, + ndc::ComparisonOperatorDefinition::In => plan::ComparisonOperatorDefinition::In, + ndc::ComparisonOperatorDefinition::Custom { argument_type } => { + plan::ComparisonOperatorDefinition::Custom { + argument_type: self.ndc_to_plan_type(argument_type)?, + } + } + }; + Ok((operator, plan_def)) + } + + fn find_collection(&self, collection_name: &str) -> Result<&ndc::CollectionInfo> { + if let Some(collection) = self.collections().get(collection_name) { + return Ok(collection); + } + if let Some((_, function)) = self.functions().get(collection_name) { + 
return Ok(function); + } + + Err(QueryPlanError::UnknownCollection( + collection_name.to_string(), + )) + } + + fn find_collection_object_type( + &self, + collection_name: &str, + ) -> Result> { + let collection = self.find_collection(collection_name)?; + self.find_object_type(&collection.collection_type) + } + + fn find_object_type<'a>( + &'a self, + object_type_name: &'a str, + ) -> Result> { + lookup_object_type( + self.object_types(), + object_type_name, + Self::lookup_scalar_type, + ) + } + + fn find_scalar_type(scalar_type_name: &str) -> Result { + Self::lookup_scalar_type(scalar_type_name) + .ok_or_else(|| QueryPlanError::UnknownScalarType(scalar_type_name.to_owned())) + } + + fn ndc_to_plan_type(&self, ndc_type: &ndc::Type) -> Result> { + inline_object_types(self.object_types(), ndc_type, Self::lookup_scalar_type) + } +} diff --git a/crates/mongodb-connector/src/api_type_conversions/conversion_error.rs b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs similarity index 58% rename from crates/mongodb-connector/src/api_type_conversions/conversion_error.rs rename to crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs index b032f484..4bef10ed 100644 --- a/crates/mongodb-connector/src/api_type_conversions/conversion_error.rs +++ b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_error.rs @@ -1,8 +1,13 @@ -use ndc_sdk::connector::{ExplainError, QueryError}; use thiserror::Error; #[derive(Clone, Debug, Error)] -pub enum ConversionError { +pub enum QueryPlanError { + #[error("expected an array at path {}", path.join("."))] + ExpectedArray { path: Vec }, + + #[error("expected an object at path {}", path.join("."))] + ExpectedObject { path: Vec }, + #[error("The connector does not yet support {0}")] NotImplemented(&'static str), @@ -22,11 +27,12 @@ pub enum ConversionError { UnknownObjectType(String), #[error( - "Unknown field \"{field_name}\" in object type \"{object_type}\"{}", + "Unknown field \"{field_name}\"{}{}", 
+ in_object_type(object_type.as_ref()), at_path(path) )] UnknownObjectTypeField { - object_type: String, + object_type: Option, field_name: String, path: Vec, }, @@ -40,13 +46,8 @@ pub enum ConversionError { path: Vec, }, - #[error( - "Unknown aggregate function, \"{aggregate_function}\" in scalar type \"{scalar_type}\"" - )] - UnknownAggregateFunction { - scalar_type: String, - aggregate_function: String, - }, + #[error("Unknown aggregate function, \"{aggregate_function}\"")] + UnknownAggregateFunction { aggregate_function: String }, #[error("Query referenced a function, \"{0}\", but it has not been defined")] UnspecifiedFunction(String), @@ -55,24 +56,6 @@ pub enum ConversionError { UnspecifiedRelation(String), } -impl From for QueryError { - fn from(error: ConversionError) -> Self { - match error { - ConversionError::NotImplemented(e) => QueryError::UnsupportedOperation(e.to_owned()), - e => QueryError::InvalidRequest(e.to_string()), - } - } -} - -impl From for ExplainError { - fn from(error: ConversionError) -> Self { - match error { - ConversionError::NotImplemented(e) => ExplainError::UnsupportedOperation(e.to_owned()), - e => ExplainError::InvalidRequest(e.to_string()), - } - } -} - fn at_path(path: &[String]) -> String { if path.is_empty() { "".to_owned() @@ -80,3 +63,10 @@ fn at_path(path: &[String]) -> String { format!(" at path {}", path.join(".")) } } + +fn in_object_type(type_name: Option<&String>) -> String { + match type_name { + Some(name) => format!(" in object type \"{name}\""), + None => "".to_owned(), + } +} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs new file mode 100644 index 00000000..e8fc4544 --- /dev/null +++ b/crates/ndc-query-plan/src/plan_for_query_request/query_plan_state.rs @@ -0,0 +1,138 @@ +use std::{ + cell::{Cell, RefCell}, + collections::BTreeMap, + rc::Rc, +}; + +use ndc::RelationshipArgument; +use ndc_models as ndc; + 
+use crate::{ + plan_for_query_request::helpers::lookup_relationship, query_plan::UnrelatedJoin, Query, + QueryContext, QueryPlanError, Relationship, +}; + +type Result = std::result::Result; + +/// Records relationship and other join references in a mutable struct. Relations are scoped to +/// a sub-query (a value of type [Query]), unrelated joins are scoped to the entire query plan. +/// +/// This does two things: +/// - Accumulate all of the details needed for joins for each sub-query in one place +/// - Associate an identifier for each join that can be used at each reference site +#[derive(Debug)] +pub struct QueryPlanState<'a, T: QueryContext> { + pub context: &'a T, + pub collection_relationships: &'a BTreeMap, + relationships: BTreeMap>, + unrelated_joins: Rc>>>, + counter: Rc>, +} + +// TODO: We may be able to unify relationships that are not identical, but that are compatible. +// For example two relationships that differ only in field selection could be merged into one +// with the union of both field selections. + +impl QueryPlanState<'_, T> { + pub fn new<'a>( + query_context: &'a T, + collection_relationships: &'a BTreeMap, + ) -> QueryPlanState<'a, T> { + QueryPlanState { + context: query_context, + collection_relationships, + relationships: Default::default(), + unrelated_joins: Rc::new(RefCell::new(Default::default())), + counter: Rc::new(Cell::new(0)), + } + } + + /// When traversing a query request into a sub-query we enter a new scope for relationships. + /// Use this function to get a new plan for the new scope. Shares query-request-level state + /// with the parent plan. 
+ pub fn state_for_subquery(&self) -> QueryPlanState<'_, T> { + QueryPlanState { + context: self.context, + collection_relationships: self.collection_relationships, + relationships: Default::default(), + unrelated_joins: self.unrelated_joins.clone(), + counter: self.counter.clone(), + } + } + + /// Record a relationship reference so that it is added to the list of joins for the query + /// plan, and get back an identifier than can be used to access the joined collection. + pub fn register_relationship( + &mut self, + ndc_relationship_name: String, + arguments: BTreeMap, + query: Query, + ) -> Result<(&str, &Relationship)> { + let already_registered = self.relationships.contains_key(&ndc_relationship_name); + + if !already_registered { + let ndc_relationship = + lookup_relationship(self.collection_relationships, &ndc_relationship_name)?; + + let relationship = Relationship { + column_mapping: ndc_relationship.column_mapping.clone(), + relationship_type: ndc_relationship.relationship_type, + target_collection: ndc_relationship.target_collection.clone(), + arguments, + query, + }; + + self.relationships + .insert(ndc_relationship_name.clone(), relationship); + } + + // Safety: we just inserted this key + let (key, relationship) = self + .relationships + .get_key_value(&ndc_relationship_name) + .unwrap(); + Ok((key, relationship)) + } + + /// Record a collection reference so that it is added to the list of joins for the query + /// plan, and get back an identifier than can be used to access the joined collection. + pub fn register_unrelated_join( + &mut self, + target_collection: String, + arguments: BTreeMap, + query: Query, + ) -> String { + let join = UnrelatedJoin { + target_collection, + arguments, + query, + }; + + let key = self.unique_name(format!("__join_{}", join.target_collection)); + self.unrelated_joins.borrow_mut().insert(key.clone(), join); + + // Unlike [Self::register_relationship] this method does not return a reference to the + // registered join. 
If we need that reference then we need another [RefCell::borrow] call + // here, and we need to return the [std::cell::Ref] value that is produced. (We can't + // borrow map values through a RefCell without keeping a live Ref.) But if that Ref is + // still alive the next time [Self::register_unrelated_join] is called then the borrow_mut + // call will fail. + key + } + + /// Use this for subquery plans to get the relationships for each sub-query + pub fn into_relationships(self) -> BTreeMap> { + self.relationships + } + + /// Use this with the top-level plan to get unrelated joins. + pub fn into_unrelated_collections(self) -> BTreeMap> { + self.unrelated_joins.take() + } + + fn unique_name(&mut self, name: String) -> String { + let count = self.counter.get(); + self.counter.set(count + 1); + format!("{name}_{count}") + } +} diff --git a/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs b/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs new file mode 100644 index 00000000..59c43475 --- /dev/null +++ b/crates/ndc-query-plan/src/plan_for_query_request/type_annotated_field.rs @@ -0,0 +1,177 @@ +use std::collections::BTreeMap; + +use itertools::Itertools as _; +use ndc_models as ndc; + +use crate::{ + Field, NestedArray, NestedField, NestedObject, ObjectType, QueryContext, QueryPlanError, Type, +}; + +use super::{ + helpers::{find_object_field, lookup_relationship}, + plan_for_query, + query_plan_state::QueryPlanState, +}; + +type Result = std::result::Result; + +/// Translates [ndc::Field] to [Field]. The latter includes type annotations. 
+pub fn type_annotated_field( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &ObjectType, + collection_object_type: &ObjectType, + field: ndc::Field, +) -> Result> { + type_annotated_field_helper( + plan_state, + root_collection_object_type, + collection_object_type, + field, + &[], + ) +} + +fn type_annotated_field_helper( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &ObjectType, + collection_object_type: &ObjectType, + field: ndc::Field, + path: &[&str], +) -> Result> { + let field = match field { + ndc::Field::Column { column, fields } => { + let column_type = find_object_field(collection_object_type, &column)?; + let fields = fields + .map(|nested_field| { + type_annotated_nested_field_helper( + plan_state, + root_collection_object_type, + column_type, + nested_field, + path, + ) + }) + .transpose()?; + Field::Column { + column_type: column_type.clone(), + column, + fields, + } + } + ndc::Field::Relationship { + arguments, + query, + relationship, + } => { + let relationship_def = + lookup_relationship(plan_state.collection_relationships, &relationship)?; + let related_collection_type = plan_state + .context + .find_collection_object_type(&relationship_def.target_collection)?; + + let query_plan = plan_for_query( + &mut plan_state.state_for_subquery(), + root_collection_object_type, + &related_collection_type, + *query, + )?; + + let (relationship_key, plan_relationship) = + plan_state.register_relationship(relationship, arguments, query_plan)?; + Field::Relationship { + relationship: relationship_key.to_owned(), + aggregates: plan_relationship.query.aggregates.clone(), + fields: plan_relationship.query.fields.clone(), + } + } + }; + Ok(field) +} + +/// Translates [ndc::NestedField] to [Field]. The latter includes type annotations. 
+pub fn type_annotated_nested_field( + query_context: &T, + collection_relationships: &BTreeMap, + result_type: &Type, + requested_fields: ndc::NestedField, +) -> Result> { + // TODO: root column references for mutations + let root_collection_object_type = &ObjectType { + name: None, + fields: Default::default(), + }; + type_annotated_nested_field_helper( + &mut QueryPlanState::new(query_context, collection_relationships), + root_collection_object_type, + result_type, + requested_fields, + &[], + ) +} + +fn type_annotated_nested_field_helper( + plan_state: &mut QueryPlanState<'_, T>, + root_collection_object_type: &ObjectType, + parent_type: &Type, + requested_fields: ndc::NestedField, + path: &[&str], +) -> Result> { + let field = match (requested_fields, parent_type) { + (ndc::NestedField::Object(object), Type::Object(object_type)) => { + NestedField::Object(NestedObject { + fields: object + .fields + .iter() + .map(|(name, field)| { + Ok(( + name.clone(), + type_annotated_field_helper( + plan_state, + root_collection_object_type, + object_type, + field.clone(), + &append_to_path(path, [name.as_ref()]), + )?, + )) + }) + .try_collect()?, + }) + } + (ndc::NestedField::Array(array), Type::ArrayOf(element_type)) => { + NestedField::Array(NestedArray { + fields: Box::new(type_annotated_nested_field_helper( + plan_state, + root_collection_object_type, + element_type, + *array.fields, + &append_to_path(path, ["[]"]), + )?), + }) + } + (nested, Type::Nullable(t)) => { + // let path = append_to_path(path, []) + type_annotated_nested_field_helper( + plan_state, + root_collection_object_type, + t, + nested, + path, + )? 
+ } + (ndc::NestedField::Object(_), _) => Err(QueryPlanError::ExpectedObject { + path: path_to_owned(path), + })?, + (ndc::NestedField::Array(_), _) => Err(QueryPlanError::ExpectedArray { + path: path_to_owned(path), + })?, + }; + Ok(field) +} + +fn append_to_path<'a>(path: &[&'a str], elems: impl IntoIterator) -> Vec<&'a str> { + path.iter().copied().chain(elems).collect() +} + +fn path_to_owned(path: &[&str]) -> Vec { + path.iter().map(|x| (*x).to_owned()).collect() +} diff --git a/crates/ndc-query-plan/src/query_plan.rs b/crates/ndc-query-plan/src/query_plan.rs new file mode 100644 index 00000000..ebeec0cd --- /dev/null +++ b/crates/ndc-query-plan/src/query_plan.rs @@ -0,0 +1,319 @@ +use std::collections::BTreeMap; +use std::fmt::Debug; + +use derivative::Derivative; +use indexmap::IndexMap; +use ndc_models::{ + Argument, OrderDirection, RelationshipArgument, RelationshipType, UnaryComparisonOperator, +}; + +use crate::Type; + +pub trait ConnectorTypes { + type ScalarType: Clone + Debug + PartialEq; + type AggregateFunction: Clone + Debug + PartialEq; + type ComparisonOperator: Clone + Debug + PartialEq; +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct QueryPlan { + pub collection: String, + pub query: Query, + pub arguments: BTreeMap, + pub variables: Option>, + + // TODO: type for unrelated collection + pub unrelated_collections: BTreeMap>, +} + +impl QueryPlan { + pub fn has_variables(&self) -> bool { + self.variables.is_some() + } +} + +pub type VariableSet = BTreeMap; +pub type Relationships = BTreeMap>; + +#[derive(Derivative)] +#[derivative( + Clone(bound = ""), + Debug(bound = ""), + Default(bound = ""), + PartialEq(bound = "") +)] +pub struct Query { + pub aggregates: Option>>, + pub fields: Option>>, + pub limit: Option, + pub aggregates_limit: Option, + pub offset: Option, + pub order_by: Option>, + pub predicate: Option>, + + /// Relationships referenced by fields and expressions in 
this query or sub-query. Does not + /// include relationships in sub-queries nested under this one. + pub relationships: Relationships, +} + +impl Query { + pub fn has_aggregates(&self) -> bool { + if let Some(aggregates) = &self.aggregates { + !aggregates.is_empty() + } else { + false + } + } + + pub fn has_fields(&self) -> bool { + if let Some(fields) = &self.fields { + !fields.is_empty() + } else { + false + } + } +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct Relationship { + pub column_mapping: BTreeMap, + pub relationship_type: RelationshipType, + pub target_collection: String, + pub arguments: BTreeMap, + pub query: Query, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct UnrelatedJoin { + pub target_collection: String, + pub arguments: BTreeMap, + pub query: Query, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum Aggregate { + ColumnCount { + /// The column to apply the count aggregate function to + column: String, + /// Whether or not only distinct items should be counted + distinct: bool, + }, + SingleColumn { + /// The column to apply the aggregation function to + column: String, + /// Single column aggregate function name. 
+ function: T::AggregateFunction, + result_type: Type, + }, + StarCount, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct NestedObject { + pub fields: IndexMap>, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct NestedArray { + pub fields: Box>, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum NestedField { + Object(NestedObject), + Array(NestedArray), +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum Field { + Column { + column: String, + + /// When the type of the column is a (possibly-nullable) array or object, + /// the caller can request a subset of the complete column data, + /// by specifying fields to fetch here. + /// If omitted, the column data will be fetched in full. + fields: Option>, + + column_type: Type, + }, + Relationship { + /// The name of the relationship to follow for the subquery - this is the key in the + /// [Query] relationships map in this module, it is **not** the key in the + /// [ndc::QueryRequest] collection_relationships map. 
+ relationship: String, + aggregates: Option>>, + fields: Option>>, + }, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum Expression { + And { + expressions: Vec>, + }, + Or { + expressions: Vec>, + }, + Not { + expression: Box>, + }, + UnaryComparisonOperator { + column: ComparisonTarget, + operator: UnaryComparisonOperator, + }, + BinaryComparisonOperator { + column: ComparisonTarget, + operator: T::ComparisonOperator, + value: ComparisonValue, + }, + Exists { + in_collection: ExistsInCollection, + predicate: Option>>, + }, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct OrderBy { + /// The elements to order by, in priority order + pub elements: Vec>, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct OrderByElement { + pub order_direction: OrderDirection, + pub target: OrderByTarget, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum OrderByTarget { + Column { + /// The name of the column + name: String, + + /// Path to a nested field within an object column + field_path: Option>, + + /// Any relationships to traverse to reach this column. These are translated from + /// [ndc_models::OrderByElement] values in the [ndc_models::QueryRequest] to names of relation + /// fields for the [QueryPlan]. + path: Vec, + }, + SingleColumnAggregate { + /// The column to apply the aggregation function to + column: String, + /// Single column aggregate function name. + function: T::AggregateFunction, + + result_type: Type, + + /// Any relationships to traverse to reach this aggregate. These are translated from + /// [ndc_models::OrderByElement] values in the [ndc_models::QueryRequest] to names of relation + /// fields for the [QueryPlan]. 
+ path: Vec, + }, + StarCountAggregate { + /// Any relationships to traverse to reach this aggregate. These are translated from + /// [ndc_models::OrderByElement] values in the [ndc_models::QueryRequest] to names of relation + /// fields for the [QueryPlan]. + path: Vec, + }, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum ComparisonTarget { + Column { + /// The name of the column + name: String, + + /// Path to a nested field within an object column + field_path: Option>, + + column_type: Type, + + /// Any relationships to traverse to reach this column. These are translated from + /// [ndc_models::PathElement] values in the [ndc_models::QueryRequest] to names of relation + /// fields for the [QueryPlan]. + path: Vec, + }, + RootCollectionColumn { + /// The name of the column + name: String, + + /// Path to a nested field within an object column + field_path: Option>, + + column_type: Type, + }, +} + +impl ComparisonTarget { + pub fn get_column_type(&self) -> &Type { + match self { + ComparisonTarget::Column { column_type, .. } => column_type, + ComparisonTarget::RootCollectionColumn { column_type, .. 
} => column_type, + } + } +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum ComparisonValue { + Column { + column: ComparisonTarget, + }, + Scalar { + value: serde_json::Value, + value_type: Type, + }, + Variable { + name: String, + variable_type: Type, + }, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub struct AggregateFunctionDefinition { + /// The scalar or object type of the result of this function + pub result_type: Type, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum ComparisonOperatorDefinition { + Equal, + In, + Custom { + /// The type of the argument to this operator + argument_type: Type, + }, +} + +#[derive(Derivative)] +#[derivative(Clone(bound = ""), Debug(bound = ""), PartialEq(bound = ""))] +pub enum ExistsInCollection { + Related { + /// Key of the relation in the [Query] joins map. Relationships are scoped to the sub-query + /// that defines the relation source. + relationship: String, + }, + Unrelated { + /// Key of the relation in the [QueryPlan] joins map. Unrelated collections are not scoped + /// to a sub-query, instead they are given in the root [QueryPlan]. + unrelated_collection: String, + }, +} diff --git a/crates/ndc-query-plan/src/type_system.rs b/crates/ndc-query-plan/src/type_system.rs new file mode 100644 index 00000000..23c9cc11 --- /dev/null +++ b/crates/ndc-query-plan/src/type_system.rs @@ -0,0 +1,112 @@ +use std::collections::BTreeMap; + +use itertools::Itertools as _; +use ndc_models as ndc; + +use crate::{self as plan, QueryPlanError}; + +/// The type of values that a column, field, or argument may take. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Type { + Scalar(ScalarType), + /// The name of an object type declared in `objectTypes` + Object(ObjectType), + ArrayOf(Box>), + /// A nullable form of any of the other types + Nullable(Box>), +} + +impl Type { + pub fn into_nullable(self) -> Self { + match self { + t @ Type::Nullable(_) => t, + t => Type::Nullable(Box::new(t)), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ObjectType { + /// A type name may be tracked for error reporting. The name does not affect how query plans + /// are generated. + pub name: Option, + pub fields: BTreeMap>, +} + +impl ObjectType { + pub fn named_fields(&self) -> impl Iterator)> { + self.fields + .iter() + .map(|(name, field)| (name.as_ref(), field)) + } +} + +/// Convert from ndc IR types to query plan types. The key differences are: +/// - query plan types use inline copies of object types instead of referencing object types by name +/// - query plan types are parameterized over the specific scalar type for a connector instead of +/// referencing scalar types by name +pub fn inline_object_types( + object_types: &BTreeMap, + t: &ndc::Type, + lookup_scalar_type: fn(&str) -> Option, +) -> Result, QueryPlanError> { + let plan_type = + match t { + ndc::Type::Named { name } => lookup_type(object_types, name, lookup_scalar_type)?, + ndc::Type::Nullable { underlying_type } => Type::Nullable(Box::new( + inline_object_types(object_types, underlying_type, lookup_scalar_type)?, + )), + ndc::Type::Array { element_type } => Type::ArrayOf(Box::new(inline_object_types( + object_types, + element_type, + lookup_scalar_type, + )?)), + ndc::Type::Predicate { .. 
} => Err(QueryPlanError::NotImplemented("predicate types"))?, + }; + Ok(plan_type) +} + +fn lookup_type( + object_types: &BTreeMap, + name: &str, + lookup_scalar_type: fn(&str) -> Option, +) -> Result, QueryPlanError> { + if let Some(scalar_type) = lookup_scalar_type(name) { + return Ok(Type::Scalar(scalar_type)); + } + let object_type = lookup_object_type_helper(object_types, name, lookup_scalar_type)?; + Ok(Type::Object(object_type)) +} + +fn lookup_object_type_helper( + object_types: &BTreeMap, + name: &str, + lookup_scalar_type: fn(&str) -> Option, +) -> Result, QueryPlanError> { + let object_type = object_types + .get(name) + .ok_or_else(|| QueryPlanError::UnknownObjectType(name.to_string()))?; + + let plan_object_type = plan::ObjectType { + name: Some(name.to_owned()), + fields: object_type + .fields + .iter() + .map(|(name, field)| { + Ok(( + name.to_owned(), + inline_object_types(object_types, &field.r#type, lookup_scalar_type)?, + )) + }) + .try_collect()?, + }; + Ok(plan_object_type) +} + +pub fn lookup_object_type( + object_types: &BTreeMap, + name: &str, + lookup_scalar_type: fn(&str) -> Option, +) -> Result, QueryPlanError> { + lookup_object_type_helper(object_types, name, lookup_scalar_type) +} diff --git a/crates/ndc-test-helpers/Cargo.toml b/crates/ndc-test-helpers/Cargo.toml index b0d18672..99349435 100644 --- a/crates/ndc-test-helpers/Cargo.toml +++ b/crates/ndc-test-helpers/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -indexmap = "2" +indexmap = { workspace = true } itertools = { workspace = true } ndc-models = { workspace = true } serde_json = "1" diff --git a/crates/ndc-test-helpers/src/comparison_target.rs b/crates/ndc-test-helpers/src/comparison_target.rs index 7838365a..73586dd4 100644 --- a/crates/ndc-test-helpers/src/comparison_target.rs +++ b/crates/ndc-test-helpers/src/comparison_target.rs @@ -6,9 +6,23 @@ macro_rules! target { path: vec![], } }; - ($column:literal, $path:expr $(,)?) 
=> { + ($column:literal, field_path:$field_path:expr $(,)?) => { $crate::ndc_models::ComparisonTarget::Column { name: $column.to_owned(), + field_path: $field_path.into_iter().map(|x| x.into()).collect(), + path: vec![], + } + }; + ($column:literal, relations:$path:expr $(,)?) => { + $crate::ndc_models::ComparisonTarget::Column { + name: $column.to_owned(), + path: $path.into_iter().map(|x| x.into()).collect(), + } + }; + ($column:literal, field_path:$field_path:expr, relations:$path:expr $(,)?) => { + $crate::ndc_models::ComparisonTarget::Column { + name: $column.to_owned(), + // field_path: $field_path.into_iter().map(|x| x.into()).collect(), path: $path.into_iter().map(|x| x.into()).collect(), } }; diff --git a/crates/ndc-test-helpers/src/expressions.rs b/crates/ndc-test-helpers/src/expressions.rs index d8e6fe3e..26c69e5f 100644 --- a/crates/ndc-test-helpers/src/expressions.rs +++ b/crates/ndc-test-helpers/src/expressions.rs @@ -33,14 +33,6 @@ pub fn is_null(target: ComparisonTarget) -> Expression { } } -pub fn equal(op1: ComparisonTarget, op2: ComparisonValue) -> Expression { - Expression::BinaryComparisonOperator { - column: op1, - operator: "_eq".to_owned(), - value: op2, - } -} - pub fn binop(oper: S, op1: ComparisonTarget, op2: ComparisonValue) -> Expression where S: ToString, diff --git a/crates/ndc-test-helpers/src/field.rs b/crates/ndc-test-helpers/src/field.rs index d844ee2e..c5987598 100644 --- a/crates/ndc-test-helpers/src/field.rs +++ b/crates/ndc-test-helpers/src/field.rs @@ -52,7 +52,7 @@ macro_rules! array { #[macro_export] macro_rules! relation_field { - ($relationship:literal => $name:literal) => { + ($name:literal => $relationship:literal) => { ( $name, $crate::ndc_models::Field::Relationship { @@ -62,7 +62,7 @@ macro_rules! 
relation_field { }, ) }; - ($relationship:literal => $name:literal, $query:expr) => { + ($name:literal => $relationship:literal, $query:expr) => { ( $name, $crate::ndc_models::Field::Relationship { diff --git a/crates/ndc-test-helpers/src/lib.rs b/crates/ndc-test-helpers/src/lib.rs index 06fb273f..a2c4871c 100644 --- a/crates/ndc-test-helpers/src/lib.rs +++ b/crates/ndc-test-helpers/src/lib.rs @@ -8,6 +8,10 @@ mod comparison_value; mod exists_in_collection; mod expressions; mod field; +mod object_type; +mod query_response; +mod relationships; +mod type_helpers; use std::collections::BTreeMap; @@ -26,6 +30,10 @@ pub use comparison_value::*; pub use exists_in_collection::*; pub use expressions::*; pub use field::*; +pub use object_type::*; +pub use query_response::*; +pub use relationships::*; +pub use type_helpers::*; #[derive(Clone, Debug, Default)] pub struct QueryRequestBuilder { @@ -84,9 +92,11 @@ impl QueryRequestBuilder { self } - pub fn variables( + pub fn variables( mut self, - variables: [Vec<(&str, serde_json::Value)>; S], + variables: impl IntoIterator< + Item = impl IntoIterator)>, + >, ) -> Self { self.variables = Some( variables @@ -94,7 +104,7 @@ impl QueryRequestBuilder { .map(|var_map| { var_map .into_iter() - .map(|(name, value)| (name.to_owned(), value)) + .map(|(name, value)| (name.to_string(), value.into())) .collect() }) .collect(), @@ -200,61 +210,6 @@ pub fn empty_expression() -> Expression { } } -#[derive(Clone, Debug)] -pub struct RelationshipBuilder { - column_mapping: BTreeMap, - relationship_type: RelationshipType, - target_collection: String, - arguments: BTreeMap, -} - -pub fn relationship( - target: &str, - column_mapping: [(&str, &str); S], -) -> RelationshipBuilder { - RelationshipBuilder::new(target, column_mapping) -} - -impl RelationshipBuilder { - pub fn new(target: &str, column_mapping: [(&str, &str); S]) -> Self { - RelationshipBuilder { - column_mapping: column_mapping - .into_iter() - .map(|(source, target)| 
(source.to_owned(), target.to_owned())) - .collect(), - relationship_type: RelationshipType::Array, - target_collection: target.to_owned(), - arguments: Default::default(), - } - } - - pub fn relationship_type(mut self, relationship_type: RelationshipType) -> Self { - self.relationship_type = relationship_type; - self - } - - pub fn object_type(mut self) -> Self { - self.relationship_type = RelationshipType::Object; - self - } - - pub fn arguments(mut self, arguments: BTreeMap) -> Self { - self.arguments = arguments; - self - } -} - -impl From for Relationship { - fn from(value: RelationshipBuilder) -> Self { - Relationship { - column_mapping: value.column_mapping, - relationship_type: value.relationship_type, - target_collection: value.target_collection, - arguments: value.arguments, - } - } -} - #[derive(Clone, Debug)] pub struct PathElementBuilder { relationship: String, diff --git a/crates/ndc-test-helpers/src/object_type.rs b/crates/ndc-test-helpers/src/object_type.rs new file mode 100644 index 00000000..9950abad --- /dev/null +++ b/crates/ndc-test-helpers/src/object_type.rs @@ -0,0 +1,21 @@ +use ndc_models::{ObjectField, ObjectType, Type}; + +pub fn object_type( + fields: impl IntoIterator)>, +) -> ObjectType { + ObjectType { + description: Default::default(), + fields: fields + .into_iter() + .map(|(name, field_type)| { + ( + name.to_string(), + ObjectField { + description: Default::default(), + r#type: field_type.into(), + }, + ) + }) + .collect(), + } +} diff --git a/crates/ndc-test-helpers/src/query_response.rs b/crates/ndc-test-helpers/src/query_response.rs new file mode 100644 index 00000000..41c39545 --- /dev/null +++ b/crates/ndc-test-helpers/src/query_response.rs @@ -0,0 +1,119 @@ +use indexmap::IndexMap; +use ndc_models::{QueryResponse, RowFieldValue, RowSet}; + +#[derive(Clone, Debug, Default)] +pub struct QueryResponseBuilder { + row_sets: Vec, +} + +impl QueryResponseBuilder { + pub fn build(self) -> QueryResponse { + QueryResponse(self.row_sets) 
+ } + + pub fn row_set(mut self, row_set: impl Into) -> Self { + self.row_sets.push(row_set.into()); + self + } + + pub fn row_set_rows( + mut self, + rows: impl IntoIterator< + Item = impl IntoIterator)>, + >, + ) -> Self { + self.row_sets.push(row_set().rows(rows).into()); + self + } + + pub fn empty_row_set(mut self) -> Self { + self.row_sets.push(RowSet { + aggregates: None, + rows: Some(vec![]), + }); + self + } +} + +impl From for QueryResponse { + fn from(value: QueryResponseBuilder) -> Self { + value.build() + } +} + +#[derive(Clone, Debug, Default)] +pub struct RowSetBuilder { + aggregates: IndexMap, + rows: Vec>, +} + +impl RowSetBuilder { + pub fn into_response(self) -> QueryResponse { + QueryResponse(vec![self.into()]) + } + + pub fn aggregates( + mut self, + aggregates: impl IntoIterator)>, + ) -> Self { + self.aggregates.extend( + aggregates + .into_iter() + .map(|(k, v)| (k.to_string(), v.into())), + ); + self + } + + pub fn rows( + mut self, + rows: impl IntoIterator< + Item = impl IntoIterator)>, + >, + ) -> Self { + self.rows.extend(rows.into_iter().map(|r| { + r.into_iter() + .map(|(k, v)| (k.to_string(), RowFieldValue(v.into()))) + .collect() + })); + self + } + + pub fn row( + mut self, + row: impl IntoIterator)>, + ) -> Self { + self.rows.push( + row.into_iter() + .map(|(k, v)| (k.to_string(), RowFieldValue(v.into()))) + .collect(), + ); + self + } +} + +impl From for RowSet { + fn from(RowSetBuilder { aggregates, rows }: RowSetBuilder) -> Self { + RowSet { + aggregates: if aggregates.is_empty() { + None + } else { + Some(aggregates) + }, + rows: if rows.is_empty() { None } else { Some(rows) }, + } + } +} + +impl From for QueryResponse { + fn from(value: RowSetBuilder) -> Self { + value.into_response() + } +} + +pub fn query_response() -> QueryResponseBuilder { + Default::default() +} + +pub fn row_set() -> RowSetBuilder { + Default::default() +} diff --git a/crates/ndc-test-helpers/src/relationships.rs 
b/crates/ndc-test-helpers/src/relationships.rs new file mode 100644 index 00000000..bdf9853c --- /dev/null +++ b/crates/ndc-test-helpers/src/relationships.rs @@ -0,0 +1,67 @@ +use std::collections::BTreeMap; + +use ndc_models::{Relationship, RelationshipArgument, RelationshipType}; + +#[derive(Clone, Debug)] +pub struct RelationshipBuilder { + column_mapping: BTreeMap, + relationship_type: RelationshipType, + target_collection: String, + arguments: BTreeMap, +} + +pub fn relationship( + target: &str, + column_mapping: [(&str, &str); S], +) -> RelationshipBuilder { + RelationshipBuilder::new(target, column_mapping) +} + +impl RelationshipBuilder { + pub fn new(target: &str, column_mapping: [(&str, &str); S]) -> Self { + RelationshipBuilder { + column_mapping: column_mapping + .into_iter() + .map(|(source, target)| (source.to_owned(), target.to_owned())) + .collect(), + relationship_type: RelationshipType::Array, + target_collection: target.to_owned(), + arguments: Default::default(), + } + } + + pub fn relationship_type(mut self, relationship_type: RelationshipType) -> Self { + self.relationship_type = relationship_type; + self + } + + pub fn object_type(mut self) -> Self { + self.relationship_type = RelationshipType::Object; + self + } + + pub fn arguments(mut self, arguments: BTreeMap) -> Self { + self.arguments = arguments; + self + } +} + +impl From for Relationship { + fn from(value: RelationshipBuilder) -> Self { + Relationship { + column_mapping: value.column_mapping, + relationship_type: value.relationship_type, + target_collection: value.target_collection, + arguments: value.arguments, + } + } +} + +pub fn collection_relationships( + relationships: [(&str, impl Into); S], +) -> BTreeMap { + relationships + .into_iter() + .map(|(name, r)| (name.to_owned(), r.into())) + .collect() +} diff --git a/crates/ndc-test-helpers/src/type_helpers.rs b/crates/ndc-test-helpers/src/type_helpers.rs new file mode 100644 index 00000000..025ab880 --- /dev/null +++ 
b/crates/ndc-test-helpers/src/type_helpers.rs @@ -0,0 +1,19 @@ +use ndc_models::Type; + +pub fn array_of(t: impl Into) -> Type { + Type::Array { + element_type: Box::new(t.into()), + } +} + +pub fn named_type(name: impl ToString) -> Type { + Type::Named { + name: name.to_string(), + } +} + +pub fn nullable(t: impl Into) -> Type { + Type::Nullable { + underlying_type: Box::new(t.into()), + } +} diff --git a/crates/test-helpers/Cargo.toml b/crates/test-helpers/Cargo.toml index 27c4ad6d..744d22ce 100644 --- a/crates/test-helpers/Cargo.toml +++ b/crates/test-helpers/Cargo.toml @@ -6,8 +6,10 @@ version.workspace = true [dependencies] configuration = { path = "../configuration" } mongodb-support = { path = "../mongodb-support" } +ndc-test-helpers = { path = "../ndc-test-helpers" } enum-iterator = "^2.0.0" mongodb = { workspace = true } +ndc-models = { workspace = true } proptest = "1" diff --git a/fixtures/connector/chinook/native_procedures/insert_artist.json b/fixtures/connector/chinook/native_mutations/insert_artist.json similarity index 100% rename from fixtures/connector/chinook/native_procedures/insert_artist.json rename to fixtures/connector/chinook/native_mutations/insert_artist.json