diff --git a/Cargo.lock b/Cargo.lock index 6084fa17..3c9d0c58 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -632,7 +632,7 @@ dependencies = [ "regex", "serde", "serde_json", - "serde_with 3.4.0", + "serde_with 3.7.0", ] [[package]] @@ -929,7 +929,7 @@ dependencies = [ "serde", "serde-enum-str", "serde_json", - "serde_with 3.4.0", + "serde_with 3.7.0", ] [[package]] @@ -1191,6 +1191,12 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "indent" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f1a0777d972970f204fdf8ef319f1f4f8459131636d7e3c96c5d59570d0fa6" + [[package]] name = "indexmap" version = "1.9.3" @@ -1511,6 +1517,7 @@ dependencies = [ "futures", "futures-util", "http", + "indent", "indexmap 1.9.3", "itertools 0.10.5", "mockall", @@ -1522,6 +1529,7 @@ dependencies = [ "schemars", "serde", "serde_json", + "serde_with 3.7.0", "thiserror", "time", "tokio", @@ -1586,6 +1594,7 @@ dependencies = [ "dc-api-types", "enum-iterator", "indexmap 1.9.3", + "mongodb", "schemars", "serde", "serde_json", @@ -2679,9 +2688,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.4.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "ee80b0e361bbf88fd2f6e242ccd19cfda072cb0faa6ae694ecee08199938569a" dependencies = [ "base64 0.21.5", "chrono", @@ -2689,8 +2698,9 @@ dependencies = [ "indexmap 1.9.3", "indexmap 2.2.5", "serde", + "serde_derive", "serde_json", - "serde_with_macros 3.4.0", + "serde_with_macros 3.7.0", "time", ] @@ -2720,9 +2730,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" dependencies = [ "darling 0.20.3", "proc-macro2", diff --git a/crates/configuration/src/native_queries.rs b/crates/configuration/src/native_queries.rs index d7946a3f..705a3c86 100644 --- a/crates/configuration/src/native_queries.rs +++ b/crates/configuration/src/native_queries.rs @@ -22,13 +22,50 @@ pub struct NativeQuery { /// `schema.json`. pub result_type: Type, - /// Arguments for per-query customization + /// Arguments to be supplied for each query invocation. These will be substituted into the + /// given `command`. + /// + /// Argument values are standard JSON mapped from GraphQL input types, not Extended JSON. + /// Values will be converted to BSON according to the types specified here. #[serde(default)] pub arguments: BTreeMap, - /// Command to run expressed as a BSON document + /// Command to run via MongoDB's `runCommand` API. For details on how to write commands see + /// https://www.mongodb.com/docs/manual/reference/method/db.runCommand/ + /// + /// The command is read as Extended JSON. It may be in canonical or relaxed format, or + /// a mixture of both. + /// See https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ + /// + /// Keys and values in the command may contain placeholders of the form `{{variableName}}` + /// which will be substituted when the native query is executed according to the given + /// arguments. + /// + /// Placeholders must be inside quotes so that the command can be stored in JSON format. 
If the
+    /// command includes a string whose only content is a placeholder, then when the variable is
+    /// substituted the entire string is replaced by the value of the variable, which may have a
+    /// non-string type. For example, in this command,
+    ///
+    /// ```json
+    /// json!({
+    ///   "insert": "posts",
+    ///   "documents": "{{ documents }}"
+    /// })
+    /// ```
+    ///
+    /// If the type of the `documents` argument is an array, then after variable substitution the
+    /// command will expand to:
+    ///
+    /// ```json
+    /// json!({
+    ///   "insert": "posts",
+    ///   "documents": [/* array of documents */]
+    /// })
+    /// ```
+    ///
     #[schemars(with = "Object")]
     pub command: bson::Document,
+    // TODO: test extjson deserialization
 
     /// Determines which servers in a cluster to read from by specifying read preference, or
     /// a predicate to apply to candidate servers.
diff --git a/crates/configuration/src/schema/database.rs b/crates/configuration/src/schema/database.rs
index 53a99521..61a9d901 100644
--- a/crates/configuration/src/schema/database.rs
+++ b/crates/configuration/src/schema/database.rs
@@ -60,3 +60,15 @@ pub struct ObjectField {
     #[serde(default)]
     pub description: Option<String>,
 }
+
+impl ObjectField {
+    pub fn new(name: impl ToString, r#type: Type) -> (String, Self) {
+        (
+            name.to_string(),
+            ObjectField {
+                r#type,
+                description: Default::default(),
+            },
+        )
+    }
+}
diff --git a/crates/mongodb-agent-common/Cargo.toml b/crates/mongodb-agent-common/Cargo.toml
index a5e42698..0fd37bcf 100644
--- a/crates/mongodb-agent-common/Cargo.toml
+++ b/crates/mongodb-agent-common/Cargo.toml
@@ -17,6 +17,7 @@ futures = "0.3.28"
 futures-util = "0.3.28"
 http = "^0.2"
 indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses
+indent = "^0.1"
 itertools = "^0.10"
 mongodb = "2.8"
 mongodb-support = { path = "../mongodb-support" }
@@ -25,6 +26,7 @@ regex = "1"
 schemars = { version = "^0.8.12", features = ["smol_str"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = { version = "1.0", features = ["preserve_order"] }
+serde_with = { version = "^3.7", features = ["base64", "hex"] }
 thiserror = "1"
 time = { version = "0.3.29", features = ["formatting", "parsing", "serde"] }
 tracing = "0.1"
diff --git a/crates/mongodb-agent-common/src/interface_types/mongo_config.rs b/crates/mongodb-agent-common/src/interface_types/mongo_config.rs
index b7323285..0801dd0c 100644
--- a/crates/mongodb-agent-common/src/interface_types/mongo_config.rs
+++ b/crates/mongodb-agent-common/src/interface_types/mongo_config.rs
@@ -1,6 +1,6 @@
 use std::collections::BTreeMap;
 
-use configuration::native_queries::NativeQuery;
+use configuration::{native_queries::NativeQuery, schema::ObjectType};
 use mongodb::Client;
 
 #[derive(Clone, Debug)]
@@ -11,4 +11,5 @@ pub struct MongoConfig {
     pub database: String,
 
     pub native_queries: BTreeMap<String, NativeQuery>,
+    pub object_types: BTreeMap<String, ObjectType>,
 }
diff --git a/crates/mongodb-agent-common/src/lib.rs b/crates/mongodb-agent-common/src/lib.rs
index ab1585eb..664c2795 100644
--- a/crates/mongodb-agent-common/src/lib.rs
+++ b/crates/mongodb-agent-common/src/lib.rs
@@ -5,6 +5,7 @@ pub mod health;
 pub mod interface_types;
 pub mod mongodb;
 pub mod mongodb_connection;
+pub mod procedure;
 pub mod query;
 pub mod scalar_types_capabilities;
 pub mod schema;
diff --git a/crates/mongodb-agent-common/src/procedure/error.rs b/crates/mongodb-agent-common/src/procedure/error.rs
new file mode 100644
index 00000000..45a5ba56
--- /dev/null
+++ b/crates/mongodb-agent-common/src/procedure/error.rs
@@ -0,0 +1,22 @@
+use mongodb::bson::Bson;
+use thiserror::Error;
+
+use crate::query::arguments::ArgumentError;
+
+#[derive(Debug, Error)]
+pub enum ProcedureError {
+    #[error("error executing mongodb command: {0}")]
+    ExecutionError(#[from] mongodb::error::Error),
+
+    #[error("a required argument was not provided, \"{0}\"")]
+    MissingArgument(String),
+
+    #[error("found a non-string argument, {0}, in a string context - if you want to use a non-string argument it must be the only thing in the string with no white space around the curly braces")]
+    NonStringInStringContext(String),
+
+    #[error("object keys must be strings, but got: \"{0}\"")]
+    NonStringKey(Bson),
+
+    #[error("could not resolve arguments: {0}")]
+    UnresolvableArguments(#[from] ArgumentError),
+}
diff --git a/crates/mongodb-agent-common/src/procedure/interpolated_command.rs b/crates/mongodb-agent-common/src/procedure/interpolated_command.rs
new file mode 100644
index 00000000..76ff4304
--- /dev/null
+++ b/crates/mongodb-agent-common/src/procedure/interpolated_command.rs
@@ -0,0 +1,289 @@
+use std::collections::BTreeMap;
+
+use itertools::Itertools as _;
+use mongodb::bson::{self, Bson};
+
+use super::ProcedureError;
+
+type Result<T> = std::result::Result<T, ProcedureError>;
+
+/// Parse native query commands, and interpolate arguments.
+pub fn interpolated_command(
+    command: &bson::Document,
+    arguments: &BTreeMap<String, Bson>,
+) -> Result<bson::Document> {
+    let bson = interpolate_helper(&command.into(), arguments)?;
+    match bson {
+        Bson::Document(doc) => Ok(doc),
+        _ => unreachable!("interpolated_command is guaranteed to produce a document"),
+    }
+}
+
+fn interpolate_helper(command_node: &Bson, arguments: &BTreeMap<String, Bson>) -> Result<Bson> {
+    let result = match command_node {
+        Bson::Array(values) => interpolate_array(values.to_vec(), arguments)?.into(),
+        Bson::Document(doc) => interpolate_document(doc.clone(), arguments)?.into(),
+        Bson::String(string) => interpolate_string(string, arguments)?,
+        // TODO: Support interpolation within other scalar types
+        value => value.clone(),
+    };
+    Ok(result)
+}
+
+fn interpolate_array(values: Vec<Bson>, arguments: &BTreeMap<String, Bson>) -> Result<Vec<Bson>> {
+    values
+        .iter()
+        .map(|value| interpolate_helper(value, arguments))
+        .try_collect()
+}
+
+fn interpolate_document(
+    document: bson::Document,
+    arguments: &BTreeMap<String, Bson>,
+) -> Result<bson::Document> {
+    document
+        .into_iter()
+        .map(|(key, value)| {
+            let interpolated_value = interpolate_helper(&value, arguments)?;
+            let interpolated_key = interpolate_string(&key, arguments)?;
+            match interpolated_key {
+                Bson::String(string_key) => Ok((string_key, interpolated_value)),
+                _ => Err(ProcedureError::NonStringKey(interpolated_key)),
+            }
+        })
+        .try_collect()
+}
+
+/// Substitute placeholders within a string in the input template. This may produce an output that
+/// is not a string if the entire content of the string is a placeholder. For example,
+///
+/// ```json
+/// { "key": "{{recordId}}" }
+/// ```
+///
+/// might expand to,
+///
+/// ```json
+/// { "key": 42 }
+/// ```
+///
+/// if the type of the variable `recordId` is `int`.
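+///
+/// When a string mixes literal text with placeholders, every substituted value must itself be a
+/// string; anything else is rejected with `NonStringInStringContext`. For example (mirroring the
+/// test below), `"{{prefix}}-{{basename}}"` with `prefix = "current"` and `basename = "some-coll"`
+/// interpolates to `"current-some-coll"`.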
+fn interpolate_string(string: &str, arguments: &BTreeMap<String, Bson>) -> Result<Bson> {
+    let parts = parse_native_query(string);
+    if parts.len() == 1 {
+        let mut parts = parts;
+        match parts.remove(0) {
+            NativeQueryPart::Text(string) => Ok(Bson::String(string)),
+            NativeQueryPart::Parameter(param) => resolve_argument(&param, arguments),
+        }
+    } else {
+        let interpolated_parts: Vec<String> = parts
+            .into_iter()
+            .map(|part| match part {
+                NativeQueryPart::Text(string) => Ok(string),
+                NativeQueryPart::Parameter(param) => {
+                    let argument_value = resolve_argument(&param, arguments)?;
+                    match argument_value {
+                        Bson::String(string) => Ok(string),
+                        _ => Err(ProcedureError::NonStringInStringContext(param)),
+                    }
+                }
+            })
+            .try_collect()?;
+        Ok(Bson::String(interpolated_parts.join("")))
+    }
+}
+
+fn resolve_argument(argument_name: &str, arguments: &BTreeMap<String, Bson>) -> Result<Bson> {
+    let argument = arguments
+        .get(argument_name)
+        .ok_or_else(|| ProcedureError::MissingArgument(argument_name.to_owned()))?;
+    Ok(argument.clone())
+}
+
+/// A part of a Native Query text, either raw text or a parameter.
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum NativeQueryPart {
+    /// A raw text part
+    Text(String),
+    /// A parameter
+    Parameter(String),
+}
+
+/// Parse a string or key in a native query into parts where variables have the syntax
+/// `{{<variable name>}}`.
+fn parse_native_query(string: &str) -> Vec<NativeQueryPart> {
+    let vec: Vec<Vec<NativeQueryPart>> = string
+        .split("{{")
+        .filter(|part| !part.is_empty())
+        .map(|part| match part.split_once("}}") {
+            None => vec![NativeQueryPart::Text(part.to_string())],
+            Some((var, text)) => {
+                if text.is_empty() {
+                    vec![NativeQueryPart::Parameter(var.trim().to_owned())]
+                } else {
+                    vec![
+                        NativeQueryPart::Parameter(var.trim().to_owned()),
+                        NativeQueryPart::Text(text.to_string()),
+                    ]
+                }
+            }
+        })
+        .collect();
+    vec.concat()
+}
+
+#[cfg(test)]
+mod tests {
+    use configuration::native_queries::NativeQuery;
+    use pretty_assertions::assert_eq;
+    use serde_json::json;
+
+    use crate::query::arguments::resolve_arguments;
+
+    use super::*;
+
+    // TODO: key
+    // TODO: key with multiple placeholders
+
+    #[test]
+    fn interpolates_non_string_type() -> anyhow::Result<()> {
+        let native_query_input = json!({
+            "resultType": { "object": "InsertArtist" },
+            "arguments": {
+                "id": { "type": { "scalar": "int" } },
+                "name": { "type": { "scalar": "string" } },
+            },
+            "command": {
+                "insert": "Artist",
+                "documents": [{
+                    "ArtistId": "{{ id }}",
+                    "Name": "{{name }}",
+                }],
+            },
+        });
+        let input_arguments = [
+            ("id".to_owned(), json!(1001)),
+            ("name".to_owned(), json!("Regina Spektor")),
+        ]
+        .into_iter()
+        .collect();
+
+        let native_query: NativeQuery = serde_json::from_value(native_query_input)?;
+        let arguments = resolve_arguments(
+            &native_query.object_types,
+            &native_query.arguments,
+            input_arguments,
+        )?;
+        let command = interpolated_command(&native_query.command, &arguments)?;
+
+        assert_eq!(
+            command,
+            bson::doc!
{ + "insert": "Artist", + "documents": [{ + "ArtistId": 1001, + "Name": "Regina Spektor", + }], + } + ); + Ok(()) + } + + #[test] + fn interpolates_array_argument() -> anyhow::Result<()> { + let native_query_input = json!({ + "name": "insertArtist", + "resultType": { "object": "InsertArtist" }, + "objectTypes": { + "ArtistInput": { + "fields": { + "ArtistId": { "type": { "scalar": "int" } }, + "Name": { "type": { "scalar": "string" } }, + }, + } + }, + "arguments": { + "documents": { "type": { "arrayOf": { "object": "ArtistInput" } } }, + }, + "command": { + "insert": "Artist", + "documents": "{{ documents }}", + }, + }); + let input_arguments = [( + "documents".to_owned(), + json!([ + { "ArtistId": 1001, "Name": "Regina Spektor" } , + { "ArtistId": 1002, "Name": "Ok Go" } , + ]), + )] + .into_iter() + .collect(); + + let native_query: NativeQuery = serde_json::from_value(native_query_input)?; + let arguments = resolve_arguments( + &native_query.object_types, + &native_query.arguments, + input_arguments, + )?; + let command = interpolated_command(&native_query.command, &arguments)?; + + assert_eq!( + command, + bson::doc! { + "insert": "Artist", + "documents": [ + { + "ArtistId": 1001, + "Name": "Regina Spektor", + }, + { + "ArtistId": 1002, + "Name": "Ok Go", + } + ], + } + ); + Ok(()) + } + + #[test] + fn interpolates_arguments_within_string() -> anyhow::Result<()> { + let native_query_input = json!({ + "name": "insert", + "resultType": { "object": "Insert" }, + "arguments": { + "prefix": { "type": { "scalar": "string" } }, + "basename": { "type": { "scalar": "string" } }, + }, + "command": { + "insert": "{{prefix}}-{{basename}}", + "empty": "", + }, + }); + let input_arguments = [ + ("prefix".to_owned(), json!("current")), + ("basename".to_owned(), json!("some-coll")), + ] + .into_iter() + .collect(); + + let native_query: NativeQuery = serde_json::from_value(native_query_input)?; + let arguments = resolve_arguments( + &native_query.object_types, + &native_query.arguments, + input_arguments, + )?; + let command = interpolated_command(&native_query.command, &arguments)?; + + assert_eq!( + command, + bson::doc! { + "insert": "current-some-coll", + "empty": "", + } + ); + Ok(()) + } +} diff --git a/crates/mongodb-agent-common/src/procedure/mod.rs b/crates/mongodb-agent-common/src/procedure/mod.rs new file mode 100644 index 00000000..cf193236 --- /dev/null +++ b/crates/mongodb-agent-common/src/procedure/mod.rs @@ -0,0 +1,74 @@ +mod error; +mod interpolated_command; + +use std::borrow::Cow; +use std::collections::BTreeMap; + +use configuration::native_queries::NativeQuery; +use configuration::schema::{ObjectField, ObjectType}; +use mongodb::options::SelectionCriteria; +use mongodb::{bson, Database}; + +use crate::query::arguments::resolve_arguments; + +pub use self::error::ProcedureError; +pub use self::interpolated_command::interpolated_command; + +/// Encapsulates running arbitrary mongodb commands with interpolated arguments +#[derive(Clone, Debug)] +pub struct Procedure<'a> { + arguments: BTreeMap, + command: Cow<'a, bson::Document>, + object_types: Cow<'a, BTreeMap>, + parameters: Cow<'a, BTreeMap>, + selection_criteria: Option>, +} + +impl<'a> Procedure<'a> { + /// Note: the `object_types` argument here is not the object types from the native query - it + /// should be the set of *all* object types collected from schema and native query definitions. 
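+    ///
+    /// A rough usage sketch (hypothetical bindings, error handling elided):
+    ///
+    /// ```ignore
+    /// let procedure = Procedure::from_native_query(&native_query, &object_types, arguments);
+    /// let result = procedure.execute(database).await?;
+    /// ```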
+    pub fn from_native_query(
+        native_query: &'a NativeQuery,
+        object_types: &'a BTreeMap<String, ObjectType>,
+        arguments: BTreeMap<String, serde_json::Value>,
+    ) -> Self {
+        Procedure {
+            arguments,
+            command: Cow::Borrowed(&native_query.command),
+            object_types: Cow::Borrowed(object_types),
+            parameters: Cow::Borrowed(&native_query.arguments),
+            selection_criteria: native_query.selection_criteria.as_ref().map(Cow::Borrowed),
+        }
+    }
+
+    pub async fn execute(self, database: Database) -> Result<bson::Document, ProcedureError> {
+        let selection_criteria = self.selection_criteria.map(Cow::into_owned);
+        let command = interpolate(
+            &self.object_types,
+            &self.parameters,
+            self.arguments,
+            &self.command,
+        )?;
+        let result = database.run_command(command, selection_criteria).await?;
+        Ok(result)
+    }
+
+    pub fn interpolated_command(self) -> Result<bson::Document, ProcedureError> {
+        interpolate(
+            &self.object_types,
+            &self.parameters,
+            self.arguments,
+            &self.command,
+        )
+    }
+}
+
+fn interpolate(
+    object_types: &BTreeMap<String, ObjectType>,
+    parameters: &BTreeMap<String, ObjectField>,
+    arguments: BTreeMap<String, serde_json::Value>,
+    command: &bson::Document,
+) -> Result<bson::Document, ProcedureError> {
+    let bson_arguments = resolve_arguments(object_types, parameters, arguments)?;
+    interpolated_command(command, &bson_arguments)
+}
diff --git a/crates/mongodb-agent-common/src/query/arguments/json_to_bson.rs b/crates/mongodb-agent-common/src/query/arguments/json_to_bson.rs
new file mode 100644
index 00000000..6ffa3bf8
--- /dev/null
+++ b/crates/mongodb-agent-common/src/query/arguments/json_to_bson.rs
@@ -0,0 +1,425 @@
+use std::{collections::BTreeMap, str::FromStr};
+
+use configuration::schema::{ObjectType, Type};
+use itertools::Itertools as _;
+use mongodb::bson::{self, Bson, Decimal128};
+use mongodb_support::BsonScalarType;
+use serde::de::DeserializeOwned;
+use serde_json::Value;
+use thiserror::Error;
+use time::{format_description::well_known::Iso8601, OffsetDateTime};
+
+#[derive(Debug, Error)]
+pub enum JsonToBsonError {
+    #[error("error converting \"{1}\" to type, \"{0:?}\"")]
+    ConversionError(Type, Value),
+
+    #[error("error converting \"{1}\" to type, \"{0:?}\": {2}")]
+    ConversionErrorWithContext(Type, Value, #[source] anyhow::Error),
+
+    #[error("cannot use value, \"{1:?}\", in position of type, \"{0:?}\"")]
+    IncompatibleType(Type, Value),
+
+    #[error("input with BSON type {expected_type:?} should be encoded in GraphQL as {expected_backing_type}, but got: {value}")]
+    IncompatibleBackingType {
+        expected_type: Type,
+        expected_backing_type: &'static str,
+        value: Value,
+    },
+
+    #[error("input object of type \"{0:?}\" is missing a field, \"{1}\"")]
+    MissingObjectField(Type, String),
+
+    #[error("inputs of type {0} are not implemented")]
+    NotImplemented(BsonScalarType),
+
+    #[error("error deserializing input: {0}")]
+    SerdeError(#[from] serde_json::Error),
+
+    #[error("unknown object type, \"{0}\"")]
+    UnknownObjectType(String),
+}
+
+type Result<T> = std::result::Result<T, JsonToBsonError>;
+
+/// Converts JSON input to BSON according to an expected BSON type.
+///
+/// The BSON library already has a `Deserialize` impl that can convert from JSON. But that
+/// implementation cannot take advantage of the type information that we have available. Instead it
+/// uses Extended JSON which uses tags in JSON data to distinguish BSON types.
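+///
+/// A minimal sketch of a call (scalar expected type, so no object types are needed):
+///
+/// ```ignore
+/// let bson = json_to_bson(
+///     &Type::Scalar(BsonScalarType::Int),
+///     &BTreeMap::new(),
+///     serde_json::json!(42),
+/// )?;
+/// assert_eq!(bson, Bson::Int32(42));
+/// ```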
+pub fn json_to_bson(
+    expected_type: &Type,
+    object_types: &BTreeMap<String, ObjectType>,
+    value: Value,
+) -> Result<Bson> {
+    match expected_type {
+        Type::Scalar(t) => json_to_bson_scalar(*t, value),
+        Type::Object(object_type_name) => {
+            let object_type = object_types
+                .get(object_type_name)
+                .ok_or_else(|| JsonToBsonError::UnknownObjectType(object_type_name.to_owned()))?;
+            convert_object(object_type_name, object_type, object_types, value)
+        }
+        Type::ArrayOf(element_type) => convert_array(element_type, object_types, value),
+        Type::Nullable(t) => convert_nullable(t, object_types, value),
+    }
+}
+
+/// Works like json_to_bson, but only converts BSON scalar types.
+pub fn json_to_bson_scalar(expected_type: BsonScalarType, value: Value) -> Result<Bson> {
+    let result = match expected_type {
+        BsonScalarType::Double => Bson::Double(deserialize(expected_type, value)?),
+        BsonScalarType::Int => Bson::Int32(deserialize(expected_type, value)?),
+        BsonScalarType::Long => Bson::Int64(deserialize(expected_type, value)?),
+        BsonScalarType::Decimal => Bson::Decimal128(
+            Decimal128::from_str(&from_string(expected_type, value.clone())?).map_err(|err| {
+                JsonToBsonError::ConversionErrorWithContext(
+                    Type::Scalar(expected_type),
+                    value,
+                    err.into(),
+                )
+            })?,
+        ),
+        BsonScalarType::String => Bson::String(deserialize(expected_type, value)?),
+        BsonScalarType::Date => convert_date(&from_string(expected_type, value)?)?,
+        BsonScalarType::Timestamp => deserialize::<de::Timestamp>(expected_type, value)?.into(),
+        BsonScalarType::BinData => deserialize::<de::BinData>(expected_type, value)?.into(),
+        BsonScalarType::ObjectId => Bson::ObjectId(deserialize(expected_type, value)?),
+        BsonScalarType::Bool => match value {
+            Value::Bool(b) => Bson::Boolean(b),
+            _ => incompatible_scalar_type(BsonScalarType::Bool, value)?,
+        },
+        BsonScalarType::Null => match value {
+            Value::Null => Bson::Null,
+            _ => incompatible_scalar_type(BsonScalarType::Null, value)?,
+        },
+        BsonScalarType::Undefined => match value {
+            Value::Null => Bson::Undefined,
+            _ => incompatible_scalar_type(BsonScalarType::Undefined, value)?,
+        },
+        BsonScalarType::Regex => deserialize::<de::Regex>(expected_type, value)?.into(),
+        BsonScalarType::Javascript => Bson::JavaScriptCode(deserialize(expected_type, value)?),
+        BsonScalarType::JavascriptWithScope => {
+            deserialize::<de::JavaScriptCodeWithScope>(expected_type, value)?.into()
+        }
+        BsonScalarType::MinKey => Bson::MinKey,
+        BsonScalarType::MaxKey => Bson::MaxKey,
+        BsonScalarType::Symbol => Bson::Symbol(deserialize(expected_type, value)?),
+        // dbPointer is deprecated
+        BsonScalarType::DbPointer => Err(JsonToBsonError::NotImplemented(expected_type))?,
+    };
+    Ok(result)
+}
+
+/// Types defined just to get deserialization logic for BSON "scalar" types that are represented in
+/// JSON as composite structures. The types here are designed to match the representations of BSON
+/// types in extjson.
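+///
+/// For instance (matching the unit tests below), binary data is expected as
+/// `{ "base64": "...", "subType": "00" }` and timestamps as `{ "t": 1565545664, "i": 1 }`.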
+mod de {
+    use mongodb::bson::{self, Bson};
+    use serde::Deserialize;
+    use serde_with::{base64::Base64, hex::Hex, serde_as};
+
+    #[serde_as]
+    #[derive(Deserialize)]
+    #[serde(rename_all = "camelCase")]
+    pub struct BinData {
+        #[serde_as(as = "Base64")]
+        base64: Vec<u8>,
+        #[serde_as(as = "Hex")]
+        sub_type: [u8; 1],
+    }
+
+    impl From<BinData> for Bson {
+        fn from(value: BinData) -> Self {
+            Bson::Binary(bson::Binary {
+                bytes: value.base64,
+                subtype: value.sub_type[0].into(),
+            })
+        }
+    }
+
+    #[derive(Deserialize)]
+    pub struct JavaScriptCodeWithScope {
+        #[serde(rename = "$code")]
+        code: String,
+        #[serde(rename = "$scope")]
+        scope: bson::Document,
+    }
+
+    impl From<JavaScriptCodeWithScope> for Bson {
+        fn from(value: JavaScriptCodeWithScope) -> Self {
+            Bson::JavaScriptCodeWithScope(bson::JavaScriptCodeWithScope {
+                code: value.code,
+                scope: value.scope,
+            })
+        }
+    }
+
+    #[derive(Deserialize)]
+    pub struct Regex {
+        pattern: String,
+        options: String,
+    }
+
+    impl From<Regex> for Bson {
+        fn from(value: Regex) -> Self {
+            Bson::RegularExpression(bson::Regex {
+                pattern: value.pattern,
+                options: value.options,
+            })
+        }
+    }
+
+    #[derive(Deserialize)]
+    pub struct Timestamp {
+        t: u32,
+        i: u32,
+    }
+
+    impl From<Timestamp> for Bson {
+        fn from(value: Timestamp) -> Self {
+            Bson::Timestamp(bson::Timestamp {
+                time: value.t,
+                increment: value.i,
+            })
+        }
+    }
+}
+
+fn convert_array(
+    element_type: &Type,
+    object_types: &BTreeMap<String, ObjectType>,
+    value: Value,
+) -> Result<Bson> {
+    let input_elements: Vec<Value> = serde_json::from_value(value)?;
+    let bson_array = input_elements
+        .into_iter()
+        .map(|v| json_to_bson(element_type, object_types, v))
+        .try_collect()?;
+    Ok(Bson::Array(bson_array))
+}
+
+fn convert_object(
+    object_type_name: &str,
+    object_type: &ObjectType,
+    object_types: &BTreeMap<String, ObjectType>,
+    value: Value,
+) -> Result<Bson> {
+    let input_fields: BTreeMap<String, Value> = serde_json::from_value(value)?;
+    let bson_doc: bson::Document = object_type
+        .named_fields()
+        .map(|field| {
+            let input_field_value = input_fields.get(field.name).ok_or_else(|| {
+                JsonToBsonError::MissingObjectField(
+                    Type::Object(object_type_name.to_owned()),
+                    field.name.to_owned(),
+                )
+            })?;
+            Ok((
+                field.name.to_owned(),
+                json_to_bson(&field.value.r#type, object_types, input_field_value.clone())?,
+            ))
+        })
+        .try_collect::<_, _, JsonToBsonError>()?;
+    Ok(bson_doc.into())
+}
+
+fn convert_nullable(
+    underlying_type: &Type,
+    object_types: &BTreeMap<String, ObjectType>,
+    value: Value,
+) -> Result<Bson> {
+    match value {
+        Value::Null => Ok(Bson::Null),
+        non_null_value => json_to_bson(underlying_type, object_types, non_null_value),
+    }
+}
+
+fn convert_date(value: &str) -> Result<Bson> {
+    let date = OffsetDateTime::parse(value, &Iso8601::DEFAULT).map_err(|err| {
+        JsonToBsonError::ConversionErrorWithContext(
+            Type::Scalar(BsonScalarType::Date),
+            Value::String(value.to_owned()),
+            err.into(),
+        )
+    })?;
+    Ok(Bson::DateTime(bson::DateTime::from_system_time(
+        date.into(),
+    )))
+}
+
+fn deserialize<T>(expected_type: BsonScalarType, value: Value) -> Result<T>
+where
+    T: DeserializeOwned,
+{
+    serde_json::from_value::<T>(value.clone()).map_err(|err| {
+        JsonToBsonError::ConversionErrorWithContext(Type::Scalar(expected_type), value, err.into())
+    })
+}
+
+fn from_string(expected_type: BsonScalarType, value: Value) -> Result<String> {
+    match value {
+        Value::String(s) => Ok(s),
+        _ => Err(JsonToBsonError::IncompatibleBackingType {
+            expected_type: Type::Scalar(expected_type),
+            expected_backing_type: "String",
+            value,
+        }),
+    }
+}
+
+fn incompatible_scalar_type(expected_type: BsonScalarType, value: Value) -> Result<Bson> {
Err(JsonToBsonError::IncompatibleType( + Type::Scalar(expected_type), + value, + )) +} + +#[cfg(test)] +mod tests { + use std::{collections::BTreeMap, str::FromStr}; + + use configuration::schema::{ObjectField, ObjectType, Type}; + use mongodb::bson::{self, datetime::DateTimeBuilder, Bson}; + use mongodb_support::BsonScalarType; + use pretty_assertions::assert_eq; + use serde_json::json; + + use super::json_to_bson; + + #[test] + #[allow(clippy::approx_constant)] + fn deserializes_specialized_scalar_types() -> anyhow::Result<()> { + let object_type_name = "scalar_test".to_owned(); + let object_type = ObjectType { + fields: BTreeMap::from([ + ObjectField::new("double", Type::Scalar(BsonScalarType::Double)), + ObjectField::new("int", Type::Scalar(BsonScalarType::Int)), + ObjectField::new("long", Type::Scalar(BsonScalarType::Long)), + ObjectField::new("decimal", Type::Scalar(BsonScalarType::Decimal)), + ObjectField::new("string", Type::Scalar(BsonScalarType::String)), + ObjectField::new("date", Type::Scalar(BsonScalarType::Date)), + ObjectField::new("timestamp", Type::Scalar(BsonScalarType::Timestamp)), + ObjectField::new("binData", Type::Scalar(BsonScalarType::BinData)), + ObjectField::new("objectId", Type::Scalar(BsonScalarType::ObjectId)), + ObjectField::new("bool", Type::Scalar(BsonScalarType::Bool)), + ObjectField::new("null", Type::Scalar(BsonScalarType::Null)), + ObjectField::new("undefined", Type::Scalar(BsonScalarType::Undefined)), + ObjectField::new("regex", Type::Scalar(BsonScalarType::Regex)), + ObjectField::new("javascript", Type::Scalar(BsonScalarType::Javascript)), + ObjectField::new( + "javascriptWithScope", + Type::Scalar(BsonScalarType::JavascriptWithScope), + ), + ObjectField::new("minKey", Type::Scalar(BsonScalarType::MinKey)), + ObjectField::new("maxKey", Type::Scalar(BsonScalarType::MaxKey)), + ObjectField::new("symbol", Type::Scalar(BsonScalarType::Symbol)), + ]), + description: Default::default(), + }; + + let input = json!({ + "double": 3.14159, + "int": 3, + "long": 3, + "decimal": "3.14159", + "string": "hello", + "date": "2024-03-22T00:59:01Z", + "timestamp": { "t": 1565545664, "i": 1 }, + "binData": { + "base64": "EEEBEIEIERA=", + "subType": "00" + }, + "objectId": "e7c8f79873814cbae1f8d84c", + "bool": true, + "null": null, + "undefined": null, + "regex": { "pattern": "^fo+$", "options": "i" }, + "javascript": "console.log('hello, world!')", + "javascriptWithScope": { + "$code": "console.log('hello, ', name)", + "$scope": { "name": "you!" }, + }, + "minKey": {}, + "maxKey": {}, + "symbol": "a_symbol", + }); + + let expected = bson::doc! 
{ + "double": Bson::Double(3.14159), + "int": Bson::Int32(3), + "long": Bson::Int64(3), + "decimal": Bson::Decimal128(bson::Decimal128::from_str("3.14159")?), + "string": Bson::String("hello".to_owned()), + "date": Bson::DateTime(DateTimeBuilder::default().year(2024).month(3).day(22).hour(0).minute(59).second(1).build()?), + "timestamp": Bson::Timestamp(bson::Timestamp { time: 1565545664, increment: 1 }), + "binData": Bson::Binary(bson::Binary { + bytes: vec![0x10, 0x41, 0x01, 0x10, 0x81, 0x08, 0x11, 0x10], + subtype: bson::spec::BinarySubtype::Generic, + }), + "objectId": Bson::ObjectId(FromStr::from_str("e7c8f79873814cbae1f8d84c")?), + "bool": Bson::Boolean(true), + "null": Bson::Null, + "undefined": Bson::Undefined, + "regex": Bson::RegularExpression(bson::Regex { pattern: "^fo+$".to_owned(), options: "i".to_owned() }), + "javascript": Bson::JavaScriptCode("console.log('hello, world!')".to_owned()), + "javascriptWithScope": Bson::JavaScriptCodeWithScope(bson::JavaScriptCodeWithScope { + code: "console.log('hello, ', name)".to_owned(), + scope: bson::doc! { "name": "you!" }, + }), + "minKey": Bson::MinKey, + "maxKey": Bson::MaxKey, + "symbol": Bson::Symbol("a_symbol".to_owned()), + }; + + let actual = json_to_bson( + &Type::Object(object_type_name.clone()), + &[(object_type_name.clone(), object_type)] + .into_iter() + .collect(), + input, + )?; + assert_eq!(actual, expected.into()); + Ok(()) + } + + #[test] + fn deserializes_arrays() -> anyhow::Result<()> { + let input = json!([ + "e7c8f79873814cbae1f8d84c", + "76a3317b46f1eea7fae4f643", + "fae1840a2b85872385c67de5", + ]); + let expected = Bson::Array(vec![ + Bson::ObjectId(FromStr::from_str("e7c8f79873814cbae1f8d84c")?), + Bson::ObjectId(FromStr::from_str("76a3317b46f1eea7fae4f643")?), + Bson::ObjectId(FromStr::from_str("fae1840a2b85872385c67de5")?), + ]); + let actual = json_to_bson( + &Type::ArrayOf(Box::new(Type::Scalar(BsonScalarType::ObjectId))), + &Default::default(), + input, + )?; + assert_eq!(actual, expected); + Ok(()) + } + + #[test] + fn deserializes_nullable_values() -> anyhow::Result<()> { + let input = json!(["e7c8f79873814cbae1f8d84c", null, "fae1840a2b85872385c67de5",]); + let expected = Bson::Array(vec![ + Bson::ObjectId(FromStr::from_str("e7c8f79873814cbae1f8d84c")?), + Bson::Null, + Bson::ObjectId(FromStr::from_str("fae1840a2b85872385c67de5")?), + ]); + let actual = json_to_bson( + &Type::ArrayOf(Box::new(Type::Nullable(Box::new(Type::Scalar( + BsonScalarType::ObjectId, + ))))), + &Default::default(), + input, + )?; + assert_eq!(actual, expected); + Ok(()) + } +} diff --git a/crates/mongodb-agent-common/src/query/arguments/mod.rs b/crates/mongodb-agent-common/src/query/arguments/mod.rs new file mode 100644 index 00000000..ab84a740 --- /dev/null +++ b/crates/mongodb-agent-common/src/query/arguments/mod.rs @@ -0,0 +1,96 @@ +mod json_to_bson; + +use std::collections::BTreeMap; + +use configuration::schema::{ObjectField, ObjectType, Type}; +use indent::indent_all_by; +use itertools::Itertools as _; +use mongodb::bson::Bson; +use serde_json::Value; +use thiserror::Error; + +use self::json_to_bson::json_to_bson; + +pub use self::json_to_bson::{json_to_bson_scalar, JsonToBsonError}; + +#[derive(Debug, Error)] +pub enum ArgumentError { + #[error("unknown variables or arguments: {}", .0.join(", "))] + Excess(Vec), + + #[error("some variables or arguments are invalid:\n{}", format_errors(.0))] + Invalid(BTreeMap), + + #[error("missing variables or arguments: {}", .0.join(", "))] + Missing(Vec), +} + +/// Translate arguments to 
queries or native queries to BSON according to declared parameter types.
+///
+/// Checks that all arguments have been provided, and that no arguments have been given that do not
+/// map to declared parameters (no excess arguments).
+pub fn resolve_arguments(
+    object_types: &BTreeMap<String, ObjectType>,
+    parameters: &BTreeMap<String, ObjectField>,
+    mut arguments: BTreeMap<String, Value>,
+) -> Result<BTreeMap<String, Bson>, ArgumentError> {
+    validate_no_excess_arguments(parameters, &arguments)?;
+
+    let (arguments, missing): (Vec<(String, Value, &Type)>, Vec<String>) = parameters
+        .iter()
+        .map(|(name, parameter)| {
+            if let Some((name, argument)) = arguments.remove_entry(name) {
+                Ok((name, argument, &parameter.r#type))
+            } else {
+                Err(name.clone())
+            }
+        })
+        .partition_result();
+    if !missing.is_empty() {
+        return Err(ArgumentError::Missing(missing));
+    }
+
+    let (resolved, errors): (BTreeMap<String, Bson>, BTreeMap<String, JsonToBsonError>) = arguments
+        .into_iter()
+        .map(|(name, argument, parameter_type)| {
+            match json_to_bson(parameter_type, object_types, argument) {
+                Ok(bson) => Ok((name, bson)),
+                Err(err) => Err((name, err)),
+            }
+        })
+        .partition_result();
+    if !errors.is_empty() {
+        return Err(ArgumentError::Invalid(errors));
+    }
+
+    Ok(resolved)
+}
+
+pub fn validate_no_excess_arguments(
+    parameters: &BTreeMap<String, ObjectField>,
+    arguments: &BTreeMap<String, Value>,
+) -> Result<(), ArgumentError> {
+    let excess: Vec<String> = arguments
+        .iter()
+        .filter_map(|(name, _)| {
+            let parameter = parameters.get(name);
+            match parameter {
+                Some(_) => None,
+                None => Some(name.clone()),
+            }
+        })
+        .collect();
+    if !excess.is_empty() {
+        Err(ArgumentError::Excess(excess))
+    } else {
+        Ok(())
+    }
+}
+
+fn format_errors(errors: &BTreeMap<String, JsonToBsonError>) -> String {
+    errors
+        .iter()
+        .map(|(name, error)| format!(" {name}:\n{}", indent_all_by(4, error.to_string())))
+        .collect::<Vec<_>>()
+        .join("\n")
+}
diff --git a/crates/mongodb-agent-common/src/query/foreach.rs b/crates/mongodb-agent-common/src/query/foreach.rs
index a70e0ffc..7febe1c0 100644
--- a/crates/mongodb-agent-common/src/query/foreach.rs
+++ b/crates/mongodb-agent-common/src/query/foreach.rs
@@ -166,8 +166,8 @@ mod tests {
             "target": {"name": ["tracks"], "type": "table"},
             "relationships": [],
             "foreach": [
-                { "artistId": {"value": 1, "value_type": "number"} },
-                { "artistId": {"value": 2, "value_type": "number"} }
+                { "artistId": {"value": 1, "value_type": "int"} },
+                { "artistId": {"value": 2, "value_type": "int"} }
             ]
         }))?;
@@ -279,8 +279,8 @@ mod tests {
             "target": {"name": ["tracks"], "type": "table"},
             "relationships": [],
             "foreach": [
-                { "artistId": {"value": 1, "value_type": "number"} },
-                { "artistId": {"value": 2, "value_type": "number"} }
+                { "artistId": {"value": 1, "value_type": "int"} },
+                { "artistId": {"value": 2, "value_type": "int"} }
             ]
         }))?;
diff --git a/crates/mongodb-agent-common/src/query/make_selector.rs b/crates/mongodb-agent-common/src/query/make_selector.rs
index 8c60abb8..170eba54 100644
--- a/crates/mongodb-agent-common/src/query/make_selector.rs
+++ b/crates/mongodb-agent-common/src/query/make_selector.rs
@@ -5,12 +5,12 @@ use dc_api_types::{
     ArrayComparisonValue, BinaryArrayComparisonOperator, ComparisonValue, ExistsInTable,
     Expression, UnaryComparisonOperator,
 };
-use mongodb::bson::{self, doc, Bson, Document};
-use time::{format_description::well_known::Iso8601, OffsetDateTime};
+use mongodb::bson::{self, doc, Document};
+use mongodb_support::BsonScalarType;
 
 use crate::{
     comparison_function::ComparisonFunction, interface_types::MongoAgentError,
-    query::column_ref::column_ref,
+    query::arguments::json_to_bson_scalar, query::column_ref::column_ref,
 };
 
 use 
BinaryArrayComparisonOperator as ArrOp; @@ -21,21 +21,13 @@ fn bson_from_scalar_value( value: &serde_json::Value, value_type: &str, ) -> Result { - match value_type { - "date" | "Date" => { - let parsed_date = value - .as_str() - .and_then(|s| OffsetDateTime::parse(s, &Iso8601::DEFAULT).ok()) - .ok_or_else(|| { - MongoAgentError::BadQuery(anyhow!( - "unrecognized date value: {value} - date values should be strings in ISO 8601 format including a time and a time zone specifier" - )) - })?; - Ok(Bson::DateTime(bson::DateTime::from_system_time( - parsed_date.into(), - ))) + // TODO: fail on unrecognized types + let bson_type = BsonScalarType::from_bson_name(value_type).ok(); + match bson_type { + Some(t) => { + json_to_bson_scalar(t, value.clone()).map_err(|e| MongoAgentError::BadQuery(anyhow!(e))) } - _ => bson::to_bson(value).map_err(|e| MongoAgentError::BadQuery(anyhow!(e))), + None => bson::to_bson(value).map_err(|e| MongoAgentError::BadQuery(anyhow!(e))), } } diff --git a/crates/mongodb-agent-common/src/query/mod.rs b/crates/mongodb-agent-common/src/query/mod.rs index 53a4bc92..b079d9d8 100644 --- a/crates/mongodb-agent-common/src/query/mod.rs +++ b/crates/mongodb-agent-common/src/query/mod.rs @@ -1,3 +1,4 @@ +pub mod arguments; mod column_ref; mod constants; mod execute_native_query_request; diff --git a/crates/mongodb-agent-common/src/state.rs b/crates/mongodb-agent-common/src/state.rs index d51ec3c2..7bc2df3a 100644 --- a/crates/mongodb-agent-common/src/state.rs +++ b/crates/mongodb-agent-common/src/state.rs @@ -31,5 +31,9 @@ pub async fn try_init_state_from_uri( client, database: database_name, native_queries: configuration.native_queries.clone(), + object_types: configuration + .object_types() + .map(|(name, object_type)| (name.clone(), object_type.clone())) + .collect(), }) } diff --git a/crates/mongodb-connector/src/mutation.rs b/crates/mongodb-connector/src/mutation.rs index dde8a43a..c751073c 100644 --- a/crates/mongodb-connector/src/mutation.rs +++ b/crates/mongodb-connector/src/mutation.rs @@ -1,34 +1,12 @@ -use std::collections::BTreeMap; - -use configuration::native_queries::NativeQuery; use futures::future::try_join_all; use itertools::Itertools; use mongodb::Database; -use mongodb_agent_common::interface_types::MongoConfig; +use mongodb_agent_common::{interface_types::MongoConfig, procedure::Procedure}; use ndc_sdk::{ connector::MutationError, json_response::JsonResponse, models::{MutationOperation, MutationOperationResults, MutationRequest, MutationResponse}, }; -use serde_json::Value; - -/// A procedure combined with inputs -#[derive(Clone, Debug)] -#[allow(dead_code)] -struct Job<'a> { - // For the time being all procedures are native queries. 
- native_query: &'a NativeQuery, - arguments: BTreeMap, -} - -impl<'a> Job<'a> { - pub fn new(native_query: &'a NativeQuery, arguments: BTreeMap) -> Self { - Job { - native_query, - arguments, - } - } -} pub async fn handle_mutation_request( config: &MongoConfig, @@ -39,7 +17,7 @@ pub async fn handle_mutation_request( let jobs = look_up_procedures(config, mutation_request)?; let operation_results = try_join_all( jobs.into_iter() - .map(|job| execute_job(database.clone(), job)), + .map(|procedure| execute_procedure(database.clone(), procedure)), ) .await?; Ok(JsonResponse::Value(MutationResponse { operation_results })) @@ -50,23 +28,18 @@ pub async fn handle_mutation_request( fn look_up_procedures( config: &MongoConfig, mutation_request: MutationRequest, -) -> Result>, MutationError> { - let (jobs, not_found): (Vec, Vec) = mutation_request +) -> Result>, MutationError> { + let (procedures, not_found): (Vec, Vec) = mutation_request .operations .into_iter() .map(|operation| match operation { MutationOperation::Procedure { - name: procedure_name, - arguments, - .. + name, arguments, .. } => { - let native_query = config - .native_queries - .iter() - .find(|(native_query_name, _)| *native_query_name == &procedure_name); - native_query - .ok_or(procedure_name) - .map(|(_, nq)| Job::new(nq, arguments)) + let native_query = config.native_queries.get(&name); + native_query.ok_or(name).map(|native_query| { + Procedure::from_native_query(native_query, &config.object_types, arguments) + }) } }) .partition_result(); @@ -78,22 +51,20 @@ fn look_up_procedures( ))); } - Ok(jobs) + Ok(procedures) } -async fn execute_job( +async fn execute_procedure( database: Database, - job: Job<'_>, + procedure: Procedure<'_>, ) -> Result { - let result = database - .run_command(job.native_query.command.clone(), None) + let result = procedure + .execute(database.clone()) .await - .map_err(|err| match *err.kind { - mongodb::error::ErrorKind::InvalidArgument { message, .. 
} => { - MutationError::UnprocessableContent(message) - } - err => MutationError::Other(Box::new(err)), - })?; + .map_err(|err| MutationError::InvalidRequest(err.to_string()))?; + + // TODO: instead of outputting extended JSON, map to JSON using a reverse of `json_to_bson` + // according to the native query result type let json_result = serde_json::to_value(result).map_err(|err| MutationError::Other(Box::new(err)))?; Ok(MutationOperationResults::Procedure { diff --git a/crates/mongodb-support/Cargo.toml b/crates/mongodb-support/Cargo.toml index dbd6cb2e..1a1d43a6 100644 --- a/crates/mongodb-support/Cargo.toml +++ b/crates/mongodb-support/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" dc-api-types = { path = "../dc-api-types" } enum-iterator = "1.4.1" indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses +mongodb = "2.8" schemars = "^0.8.12" serde = { version = "1", features = ["derive"] } serde_json = "1" diff --git a/crates/mongodb-support/src/bson_type.rs b/crates/mongodb-support/src/bson_type.rs index 5f948553..b7fb52ac 100644 --- a/crates/mongodb-support/src/bson_type.rs +++ b/crates/mongodb-support/src/bson_type.rs @@ -1,5 +1,6 @@ use dc_api_types::GraphQlType; use enum_iterator::{all, Sequence}; +use mongodb::bson::Bson; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -163,11 +164,50 @@ impl BsonScalarType { if name == "number" { return Ok(S::Double); } - let scalar_type = all::().find(|s| s.bson_name() == name); + // case-insensitive comparison because we are inconsistent about initial-letter + // capitalization between v2 and v3 + let scalar_type = + all::().find(|s| s.bson_name().eq_ignore_ascii_case(name)); scalar_type.ok_or_else(|| Error::UnknownScalarType(name.to_owned())) } } +impl std::fmt::Display for BsonScalarType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.bson_name()) + } +} + +impl TryFrom<&Bson> for BsonScalarType { + type Error = Error; + + fn try_from(value: &Bson) -> Result { + match value { + Bson::Double(_) => Ok(S::Double), + Bson::String(_) => Ok(S::String), + Bson::Array(_) => Err(Error::ExpectedScalarType(BsonType::Array)), + Bson::Document(_) => Err(Error::ExpectedScalarType(BsonType::Object)), + Bson::Boolean(_) => Ok(S::Bool), + Bson::Null => Ok(S::Null), + Bson::RegularExpression(_) => Ok(S::Regex), + Bson::JavaScriptCode(_) => Ok(S::Javascript), + Bson::JavaScriptCodeWithScope(_) => Ok(S::JavascriptWithScope), + Bson::Int32(_) => Ok(S::Int), + Bson::Int64(_) => Ok(S::Long), + Bson::Timestamp(_) => Ok(S::Timestamp), + Bson::Binary(_) => Ok(S::BinData), + Bson::ObjectId(_) => Ok(S::ObjectId), + Bson::DateTime(_) => Ok(S::Date), + Bson::Symbol(_) => Ok(S::Symbol), + Bson::Decimal128(_) => Ok(S::Decimal), + Bson::Undefined => Ok(S::Undefined), + Bson::MaxKey => Ok(S::MaxKey), + Bson::MinKey => Ok(S::MinKey), + Bson::DbPointer(_) => Ok(S::DbPointer), + } + } +} + impl TryFrom for BsonScalarType { type Error = Error; diff --git a/fixtures/connector/chinook/native_queries/hello.json b/fixtures/connector/chinook/native_queries/hello.json new file mode 100644 index 00000000..c88d253f --- /dev/null +++ b/fixtures/connector/chinook/native_queries/hello.json @@ -0,0 +1,26 @@ +{ + "name": "hello", + "description": "Example of a read-only native query", + "objectTypes": { + "HelloResult": { + "fields": { + "ok": { + "type": { + "scalar": "int" + } + }, + "readOnly": { + "type": { + "scalar": "bool" + } + } + } + } + }, + "resultType": { + "object": "HelloResult" + 
}, + "command": { + "hello": 1 + } +} diff --git a/fixtures/connector/chinook/native_queries/hello.yaml b/fixtures/connector/chinook/native_queries/hello.yaml deleted file mode 100644 index 4a027c8f..00000000 --- a/fixtures/connector/chinook/native_queries/hello.yaml +++ /dev/null @@ -1,13 +0,0 @@ -name: hello -description: Example of a read-only native query -objectTypes: - HelloResult: - fields: - ok: - type: !scalar int - readOnly: - type: !scalar bool - # There are more fields but you get the idea -resultType: !object HelloResult -command: - hello: 1 diff --git a/fixtures/connector/chinook/native_queries/insert_artist.json b/fixtures/connector/chinook/native_queries/insert_artist.json new file mode 100644 index 00000000..7ff29310 --- /dev/null +++ b/fixtures/connector/chinook/native_queries/insert_artist.json @@ -0,0 +1,45 @@ +{ + "name": "insertArtist", + "description": "Example of a database update using a native query", + "mode": "readWrite", + "resultType": { + "object": "InsertArtist" + }, + "arguments": { + "id": { + "type": { + "scalar": "int" + } + }, + "name": { + "type": { + "scalar": "string" + } + } + }, + "objectTypes": { + "InsertArtist": { + "fields": { + "ok": { + "type": { + "scalar": "int" + } + }, + "n": { + "type": { + "scalar": "int" + } + } + } + } + }, + "command": { + "insert": "Artist", + "documents": [ + { + "ArtistId": "{{ id }}", + "Name": "{{ name }}" + } + ] + } +} diff --git a/fixtures/connector/chinook/native_queries/insert_artist.yaml b/fixtures/connector/chinook/native_queries/insert_artist.yaml deleted file mode 100644 index cb04258b..00000000 --- a/fixtures/connector/chinook/native_queries/insert_artist.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: insertArtist -description: Example of a database update using a native query -objectTypes: - InsertArtist: - fields: - ok: - type: !scalar int - n: - type: !scalar int -resultType: !object InsertArtist -# TODO: implement arguments instead of hard-coding inputs -command: - insert: "Artist" - documents: - - ArtistId: 1001 - Name: Regina Spektor diff --git a/fixtures/ddn/subgraphs/chinook/commands/InsertArtist.hml b/fixtures/ddn/subgraphs/chinook/commands/InsertArtist.hml index 7b1d3fff..663e9199 100644 --- a/fixtures/ddn/subgraphs/chinook/commands/InsertArtist.hml +++ b/fixtures/ddn/subgraphs/chinook/commands/InsertArtist.hml @@ -4,7 +4,11 @@ definition: name: insertArtist description: Example of a database update using a native query outputType: InsertArtist - arguments: [] + arguments: + - name: id + type: Int! + - name: name + type: String! source: dataConnectorName: mongodb dataConnectorCommand: diff --git a/fixtures/ddn/subgraphs/chinook/dataconnectors/mongodb.hml b/fixtures/ddn/subgraphs/chinook/dataconnectors/mongodb.hml index d94ec308..37db817e 100644 --- a/fixtures/ddn/subgraphs/chinook/dataconnectors/mongodb.hml +++ b/fixtures/ddn/subgraphs/chinook/dataconnectors/mongodb.hml @@ -1016,8 +1016,10 @@ definition: - name: insertArtist description: Example of a database update using a native query result_type: { type: named, name: InsertArtist } - arguments: {} - command: { insert: Artist, documents: [{ ArtistId: 1001, Name: Regina Spektor }] } + arguments: + id: { type: { type: named, name: Int } } + name: { type: { type: named, name: String } } + command: { insert: Artist, documents: [{ ArtistId: "{{ id }}", Name: "{{ name }}" }] } capabilities: version: ^0.1.0 capabilities: