diff --git a/CHANGELOG.md b/CHANGELOG.md index fdf66752..27a2ae7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,10 +4,46 @@ This changelog documents the changes between release versions. ## [Unreleased] +### Added + +- Add uuid scalar type ([#148](https://github.com/hasura/ndc-mongodb/pull/148)) + ### Fixed - Update dependencies to get fixes for reported security vulnerabilities ([#149](https://github.com/hasura/ndc-mongodb/pull/149)) +#### UUID scalar type + +Previously UUID values would show up in GraphQL as `BinData`. BinData is a generalized BSON type for binary data. It +doesn't provide a great interface for working with UUIDs because binary data must be given as a JSON object with binary +data in base64-encoding (while UUIDs are usually given in a specific hex-encoded string format), and there is also +a mandatory "subtype" field. For example a BinData value representing a UUID fetched via GraphQL looks like this: + +```json +{ "base64": "QKaT0MAKQl2vXFNeN/3+nA==", "subType":"04" } +``` + +With this change UUID fields can use the new `uuid` type instead of `binData`. Values of type `uuid` are represented in +JSON as strings. The same value in a field with type `uuid` looks like this: + +```json +"40a693d0-c00a-425d-af5c-535e37fdfe9c" +``` + +This means that you can now, for example, filter using string representations for UUIDs: + +```gql +query { + posts(where: {id: {_eq: "40a693d0-c00a-425d-af5c-535e37fdfe9c"}}) { + title + } +} +``` + +Introspection has been updated so that database fields containing UUIDs will use the `uuid` type when setting up new +collections, or when re-introspecting after deleting the existing schema configuration. For migrating you may delete and +re-introspect, or edit schema files to change occurrences of `binData` to `uuid`. + #### Security Fixes Rust dependencies have been updated to get fixes for these advisories: diff --git a/crates/cli/src/introspection/sampling.rs b/crates/cli/src/introspection/sampling.rs index fcfc5e9d..c0809fe9 100644 --- a/crates/cli/src/introspection/sampling.rs +++ b/crates/cli/src/introspection/sampling.rs @@ -8,7 +8,7 @@ use configuration::{ Schema, WithName, }; use futures_util::TryStreamExt; -use mongodb::bson::{doc, Bson, Document}; +use mongodb::bson::{doc, spec::BinarySubtype, Binary, Bson, Document}; use mongodb_agent_common::mongodb::{CollectionTrait as _, DatabaseTrait}; use mongodb_support::{ aggregate::{Pipeline, Stage}, @@ -220,7 +220,13 @@ fn make_field_type( Bson::Int32(_) => scalar(Int), Bson::Int64(_) => scalar(Long), Bson::Timestamp(_) => scalar(Timestamp), - Bson::Binary(_) => scalar(BinData), + Bson::Binary(Binary { subtype, .. }) => { + if *subtype == BinarySubtype::Uuid { + scalar(UUID) + } else { + scalar(BinData) + } + } Bson::ObjectId(_) => scalar(ObjectId), Bson::DateTime(_) => scalar(Date), Bson::Symbol(_) => scalar(Symbol), diff --git a/crates/cli/src/introspection/type_unification.rs b/crates/cli/src/introspection/type_unification.rs index 1203593f..fc4216be 100644 --- a/crates/cli/src/introspection/type_unification.rs +++ b/crates/cli/src/introspection/type_unification.rs @@ -48,13 +48,9 @@ pub fn unify_type(type_a: Type, type_b: Type) -> Type { // Scalar types unify if they are the same type, or if one is a superset of the other. // If they are diffferent then the union is ExtendedJSON. 
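For reference, a minimal standalone sketch (not connector code) of the round trip the new `uuid` representation relies on, using the same `bson` crate APIs the connector calls (`bson::Uuid::parse_str`, `Binary::from_uuid`, `Binary::to_uuid`):

```rust
use mongodb::bson::{self, spec::BinarySubtype, Binary};

fn main() -> anyhow::Result<()> {
    // String form, as a GraphQL client sends it for a `uuid` field.
    let input = "40a693d0-c00a-425d-af5c-535e37fdfe9c";

    // JSON string -> BSON: parse, then wrap as Binary with the Uuid subtype (4).
    let uuid = bson::Uuid::parse_str(input)?;
    let stored = Binary::from_uuid(uuid);
    assert_eq!(stored.subtype, BinarySubtype::Uuid);

    // BSON -> JSON: recover the UUID and serialize it; bson::Uuid serializes as
    // the familiar hyphenated hex string rather than a base64 object.
    let output = serde_json::to_value(stored.to_uuid()?)?;
    assert_eq!(output, serde_json::json!(input));
    Ok(())
}
```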
(Type::Scalar(scalar_a), Type::Scalar(scalar_b)) => { - if scalar_a == scalar_b || is_supertype(&scalar_a, &scalar_b) { - Type::Scalar(scalar_a) - } else if is_supertype(&scalar_b, &scalar_a) { - Type::Scalar(scalar_b) - } else { - Type::ExtendedJSON - } + BsonScalarType::common_supertype(scalar_a, scalar_b) + .map(Type::Scalar) + .unwrap_or(Type::ExtendedJSON) } // Object types unify if they have the same name. @@ -192,20 +188,6 @@ pub fn unify_object_types( merged_type_map.into_values().collect() } -/// True iff we consider a to be a supertype of b. -/// -/// Note that if you add more supertypes here then it is important to also update the custom -/// equality check in our tests in mongodb_agent_common::query::serialization::tests. Equality -/// needs to be transitive over supertypes, so for example if we have, -/// -/// (Double, Int), (Decimal, Double) -/// -/// then in addition to comparing ints to doubles, and doubles to decimals, we also need to compare -/// decimals to ints. -pub fn is_supertype(a: &BsonScalarType, b: &BsonScalarType) -> bool { - matches!((a, b), (Double, Int)) -} - #[cfg(test)] mod tests { use std::collections::{HashMap, HashSet}; diff --git a/crates/cli/src/native_query/type_solver/simplify.rs b/crates/cli/src/native_query/type_solver/simplify.rs index be8cc41d..f007c554 100644 --- a/crates/cli/src/native_query/type_solver/simplify.rs +++ b/crates/cli/src/native_query/type_solver/simplify.rs @@ -7,8 +7,6 @@ use mongodb_support::BsonScalarType; use ndc_models::{FieldName, ObjectTypeName}; use nonempty::NonEmpty; -use crate::introspection::type_unification::is_supertype; - use crate::native_query::helpers::get_object_field_type; use crate::native_query::type_constraint::Variance; use crate::native_query::{ @@ -290,19 +288,13 @@ fn solve_scalar( b: BsonScalarType, ) -> Result { let solution = match variance { - Variance::Covariant => { - if a == b || is_supertype(&a, &b) { - Some(C::Scalar(a)) - } else if is_supertype(&b, &a) { - Some(C::Scalar(b)) - } else { - Some(C::Union([C::Scalar(a), C::Scalar(b)].into())) - } - } + Variance::Covariant => BsonScalarType::common_supertype(a, b) + .map(C::Scalar) + .or_else(|| Some(C::Union([C::Scalar(a), C::Scalar(b)].into()))), Variance::Contravariant => { - if a == b || is_supertype(&a, &b) { + if a == b || BsonScalarType::is_supertype(a, b) { Some(C::Scalar(b)) - } else if is_supertype(&b, &a) { + } else if BsonScalarType::is_supertype(b, a) { Some(C::Scalar(a)) } else { None diff --git a/crates/integration-tests/src/tests/filtering.rs b/crates/integration-tests/src/tests/filtering.rs index d0f68a68..2d8fba81 100644 --- a/crates/integration-tests/src/tests/filtering.rs +++ b/crates/integration-tests/src/tests/filtering.rs @@ -1,5 +1,5 @@ use insta::assert_yaml_snapshot; -use ndc_test_helpers::{binop, field, query, query_request, target, variable}; +use ndc_test_helpers::{binop, field, query, query_request, target, value, variable}; use crate::{connector::Connector, graphql_query, run_connector_query}; @@ -85,3 +85,23 @@ async fn filters_by_comparisons_on_elements_of_array_of_scalars_against_variable ); Ok(()) } + +#[tokio::test] +async fn filters_by_uuid() -> anyhow::Result<()> { + assert_yaml_snapshot!( + run_connector_query( + Connector::TestCases, + query_request().collection("uuids").query( + query() + .predicate(binop( + "_eq", + target!("uuid"), + value!("40a693d0-c00a-425d-af5c-535e37fdfe9c") + )) + .fields([field!("name"), field!("uuid"), field!("uuid_as_string")]), + ) + ) + .await? 
+ ); + Ok(()) +} diff --git a/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_uuid.snap b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_uuid.snap new file mode 100644 index 00000000..80fd4607 --- /dev/null +++ b/crates/integration-tests/src/tests/snapshots/integration_tests__tests__filtering__filters_by_uuid.snap @@ -0,0 +1,8 @@ +--- +source: crates/integration-tests/src/tests/filtering.rs +expression: "run_connector_query(Connector::TestCases,\nquery_request().collection(\"uuids\").query(query().predicate(binop(\"_eq\",\ntarget!(\"uuid\"),\nvalue!(\"40a693d0-c00a-425d-af5c-535e37fdfe9c\"))).fields([field!(\"name\"),\nfield!(\"uuid\"), field!(\"uuid_as_string\")]),)).await?" +--- +- rows: + - name: peristeria elata + uuid: 40a693d0-c00a-425d-af5c-535e37fdfe9c + uuid_as_string: 40a693d0-c00a-425d-af5c-535e37fdfe9c diff --git a/crates/mongodb-agent-common/proptest-regressions/query/serialization/tests.txt b/crates/mongodb-agent-common/proptest-regressions/query/serialization/tests.txt index e85c3bad..cbce5bb6 100644 --- a/crates/mongodb-agent-common/proptest-regressions/query/serialization/tests.txt +++ b/crates/mongodb-agent-common/proptest-regressions/query/serialization/tests.txt @@ -11,3 +11,4 @@ cc 21360610045c5a616b371fb8d5492eb0c22065d62e54d9c8a8761872e2e192f3 # shrinks to cc 8842e7f78af24e19847be5d8ee3d47c547ef6c1bb54801d360a131f41a87f4fa cc 2a192b415e5669716701331fe4141383a12ceda9acc9f32e4284cbc2ed6f2d8a # shrinks to bson = Document({"A": Document({"¡": JavaScriptCodeWithScope { code: "", scope: Document({"\0": Int32(-1)}) }})}), mode = Relaxed cc 4c37daee6ab1e1bcc75b4089786253f29271d116a1785180560ca431d2b4a651 # shrinks to bson = Document({"0": Document({"A": Array([Int32(0), Decimal128(...)])})}) +cc ad219d6630a8e9a386e734b6ba440577162cca8435c7685e32b574e9b1aa390e diff --git a/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs b/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs index ead29d93..a03d50e0 100644 --- a/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs +++ b/crates/mongodb-agent-common/src/query/serialization/bson_to_json.rs @@ -18,6 +18,9 @@ pub enum BsonToJsonError { #[error("error converting 64-bit floating point number from BSON to JSON: {0}")] DoubleConversion(f64), + #[error("error converting UUID from BSON to JSON: {0}")] + UuidConversion(#[from] bson::uuid::Error), + #[error("input object of type {0:?} is missing a field, \"{1}\"")] MissingObjectField(Type, String), @@ -85,6 +88,7 @@ fn bson_scalar_to_json( (BsonScalarType::Timestamp, Bson::Timestamp(v)) => { Ok(to_value::(v.into())?) } + (BsonScalarType::UUID, Bson::Binary(b)) => Ok(serde_json::to_value(b.to_uuid()?)?), (BsonScalarType::BinData, Bson::Binary(b)) => { Ok(to_value::(b.into())?) } diff --git a/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs b/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs index 5dff0be0..ea855132 100644 --- a/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs +++ b/crates/mongodb-agent-common/src/query/serialization/json_to_bson.rs @@ -71,11 +71,12 @@ pub fn json_to_bson(expected_type: &Type, value: Value) -> Result { /// Works like json_to_bson, but only converts BSON scalar types. 
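The new `UuidConversion` error variant covers fields declared as `uuid` whose stored binary value cannot be read as a UUID. A hedged sketch of the underlying `bson` crate behavior that produces that error:

```rust
use mongodb::bson::{spec::BinarySubtype, Binary};

fn main() {
    // A generic binary payload is not convertible to a UUID...
    let generic = Binary { subtype: BinarySubtype::Generic, bytes: vec![1, 2, 3] };
    assert!(generic.to_uuid().is_err());

    // ...while a 16-byte payload tagged with the Uuid subtype (4) converts cleanly.
    let uuid = Binary { subtype: BinarySubtype::Uuid, bytes: vec![0u8; 16] };
    assert!(uuid.to_uuid().is_ok());
}
```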
pub fn json_to_bson_scalar(expected_type: BsonScalarType, value: Value) -> Result { + use BsonScalarType as S; let result = match expected_type { - BsonScalarType::Double => Bson::Double(deserialize(expected_type, value)?), - BsonScalarType::Int => Bson::Int32(deserialize(expected_type, value)?), - BsonScalarType::Long => convert_long(&from_string(expected_type, value)?)?, - BsonScalarType::Decimal => Bson::Decimal128( + S::Double => Bson::Double(deserialize(expected_type, value)?), + S::Int => Bson::Int32(deserialize(expected_type, value)?), + S::Long => convert_long(&from_string(expected_type, value)?)?, + S::Decimal => Bson::Decimal128( Decimal128::from_str(&from_string(expected_type, value.clone())?).map_err(|err| { JsonToBsonError::ConversionErrorWithContext( Type::Scalar(MongoScalarType::Bson(expected_type)), @@ -84,37 +85,34 @@ pub fn json_to_bson_scalar(expected_type: BsonScalarType, value: Value) -> Resul ) })?, ), - BsonScalarType::String => Bson::String(deserialize(expected_type, value)?), - BsonScalarType::Date => convert_date(&from_string(expected_type, value)?)?, - BsonScalarType::Timestamp => { - deserialize::(expected_type, value)?.into() - } - BsonScalarType::BinData => { - deserialize::(expected_type, value)?.into() - } - BsonScalarType::ObjectId => Bson::ObjectId(deserialize(expected_type, value)?), - BsonScalarType::Bool => match value { + S::String => Bson::String(deserialize(expected_type, value)?), + S::Date => convert_date(&from_string(expected_type, value)?)?, + S::Timestamp => deserialize::(expected_type, value)?.into(), + S::BinData => deserialize::(expected_type, value)?.into(), + S::UUID => convert_uuid(&from_string(expected_type, value)?)?, + S::ObjectId => Bson::ObjectId(deserialize(expected_type, value)?), + S::Bool => match value { Value::Bool(b) => Bson::Boolean(b), - _ => incompatible_scalar_type(BsonScalarType::Bool, value)?, + _ => incompatible_scalar_type(S::Bool, value)?, }, - BsonScalarType::Null => match value { + S::Null => match value { Value::Null => Bson::Null, - _ => incompatible_scalar_type(BsonScalarType::Null, value)?, + _ => incompatible_scalar_type(S::Null, value)?, }, - BsonScalarType::Undefined => match value { + S::Undefined => match value { Value::Null => Bson::Undefined, - _ => incompatible_scalar_type(BsonScalarType::Undefined, value)?, + _ => incompatible_scalar_type(S::Undefined, value)?, }, - BsonScalarType::Regex => deserialize::(expected_type, value)?.into(), - BsonScalarType::Javascript => Bson::JavaScriptCode(deserialize(expected_type, value)?), - BsonScalarType::JavascriptWithScope => { + S::Regex => deserialize::(expected_type, value)?.into(), + S::Javascript => Bson::JavaScriptCode(deserialize(expected_type, value)?), + S::JavascriptWithScope => { deserialize::(expected_type, value)?.into() } - BsonScalarType::MinKey => Bson::MinKey, - BsonScalarType::MaxKey => Bson::MaxKey, - BsonScalarType::Symbol => Bson::Symbol(deserialize(expected_type, value)?), + S::MinKey => Bson::MinKey, + S::MaxKey => Bson::MaxKey, + S::Symbol => Bson::Symbol(deserialize(expected_type, value)?), // dbPointer is deprecated - BsonScalarType::DbPointer => Err(JsonToBsonError::NotImplemented(expected_type))?, + S::DbPointer => Err(JsonToBsonError::NotImplemented(expected_type))?, }; Ok(result) } @@ -191,6 +189,17 @@ fn convert_long(value: &str) -> Result { Ok(Bson::Int64(n)) } +fn convert_uuid(value: &str) -> Result { + let uuid = bson::Uuid::parse_str(value).map_err(|err| { + JsonToBsonError::ConversionErrorWithContext( + 
Type::Scalar(MongoScalarType::Bson(BsonScalarType::UUID)), + value.into(), + err.into(), + ) + })?; + Ok(bson::binary::Binary::from_uuid(uuid).into()) +} + fn deserialize(expected_type: BsonScalarType, value: Value) -> Result where T: DeserializeOwned, diff --git a/crates/mongodb-agent-common/src/scalar_types_capabilities.rs b/crates/mongodb-agent-common/src/scalar_types_capabilities.rs index ea7d2352..f77bcca9 100644 --- a/crates/mongodb-agent-common/src/scalar_types_capabilities.rs +++ b/crates/mongodb-agent-common/src/scalar_types_capabilities.rs @@ -94,6 +94,7 @@ fn bson_scalar_type_representation(bson_scalar_type: BsonScalarType) -> Option Some(TypeRepresentation::Timestamp), // Mongo Date is milliseconds since unix epoch BsonScalarType::Timestamp => None, // Internal Mongo timestamp type BsonScalarType::BinData => None, + BsonScalarType::UUID => Some(TypeRepresentation::String), BsonScalarType::ObjectId => Some(TypeRepresentation::String), // Mongo ObjectId is usually expressed as a 24 char hex string (12 byte number) BsonScalarType::Bool => Some(TypeRepresentation::Boolean), BsonScalarType::Null => None, diff --git a/crates/mongodb-agent-common/src/schema.rs b/crates/mongodb-agent-common/src/schema.rs index 63daf74e..e475eb7f 100644 --- a/crates/mongodb-agent-common/src/schema.rs +++ b/crates/mongodb-agent-common/src/schema.rs @@ -35,7 +35,11 @@ pub enum Property { }, #[serde(untagged)] Scalar { - #[serde(rename = "bsonType", default = "default_bson_scalar_type")] + #[serde( + rename = "bsonType", + deserialize_with = "deserialize_scalar_bson_type", + default = "default_bson_scalar_type" + )] bson_type: BsonScalarType, #[serde(skip_serializing_if = "Option::is_none")] description: Option, @@ -60,6 +64,15 @@ pub fn get_property_description(p: &Property) -> Option { } } +fn deserialize_scalar_bson_type<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + use serde::de::Error; + let value = BsonType::deserialize(deserializer)?; + value.try_into().map_err(D::Error::custom) +} + fn default_bson_scalar_type() -> BsonScalarType { BsonScalarType::Undefined } diff --git a/crates/mongodb-support/src/bson_type.rs b/crates/mongodb-support/src/bson_type.rs index 2289e534..c1950ec6 100644 --- a/crates/mongodb-support/src/bson_type.rs +++ b/crates/mongodb-support/src/bson_type.rs @@ -80,21 +80,7 @@ impl<'de> Deserialize<'de> for BsonType { } } -#[derive( - Copy, - Clone, - Debug, - PartialEq, - Eq, - Hash, - PartialOrd, - Ord, - Sequence, - Serialize, - Deserialize, - JsonSchema, -)] -#[serde(try_from = "BsonType", rename_all = "camelCase")] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Sequence, JsonSchema)] pub enum BsonScalarType { // numeric Double, @@ -109,6 +95,10 @@ pub enum BsonScalarType { Date, Timestamp, + // binary subtypes - these are stored in BSON using the BinData type, but there are multiple + // binary subtype codes, and it's useful to have first-class representations for those + UUID, // subtype 4 + // other BinData, ObjectId, @@ -150,6 +140,7 @@ impl BsonScalarType { S::Undefined => "undefined", S::DbPointer => "dbPointer", S::Symbol => "symbol", + S::UUID => "uuid", } } @@ -174,6 +165,7 @@ impl BsonScalarType { S::Undefined => "Undefined", S::DbPointer => "DbPointer", S::Symbol => "Symbol", + S::UUID => "UUID", } } @@ -190,6 +182,31 @@ impl BsonScalarType { scalar_type.ok_or_else(|| Error::UnknownScalarType(name.to_owned())) } + pub fn is_binary(self) -> bool { + match self { + S::BinData => true, + S::UUID => true, + S::Double => 
false, + S::Decimal => false, + S::Int => false, + S::Long => false, + S::String => false, + S::Date => false, + S::Timestamp => false, + S::ObjectId => false, + S::Bool => false, + S::Null => false, + S::Regex => false, + S::Javascript => false, + S::JavascriptWithScope => false, + S::MinKey => false, + S::MaxKey => false, + S::Undefined => false, + S::DbPointer => false, + S::Symbol => false, + } + } + pub fn is_orderable(self) -> bool { match self { S::Double => true, @@ -211,6 +228,7 @@ impl BsonScalarType { S::Undefined => false, S::DbPointer => false, S::Symbol => false, + S::UUID => false, } } @@ -235,6 +253,7 @@ impl BsonScalarType { S::Undefined => false, S::DbPointer => false, S::Symbol => false, + S::UUID => false, } } @@ -259,7 +278,60 @@ impl BsonScalarType { S::Undefined => true, S::DbPointer => true, S::Symbol => true, + S::UUID => true, + } + } + + /// True iff we consider a to be a supertype of b. + /// + /// Note that if you add more supertypes here then it is important to also update the custom + /// equality check in our tests in mongodb_agent_common::query::serialization::tests. Equality + /// needs to be transitive over supertypes, so for example if we have, + /// + /// (Double, Int), (Decimal, Double) + /// + /// then in addition to comparing ints to doubles, and doubles to decimals, we also need to compare + /// decimals to ints. + pub fn is_supertype(a: Self, b: Self) -> bool { + Self::common_supertype(a, b).is_some_and(|c| c == a) + } + + /// If there is a BSON scalar type that encompasses both a and b, return it. This does not + /// require a and to overlap. The returned type may be equal to a or b if one is a supertype of + /// the other. + pub fn common_supertype(a: BsonScalarType, b: BsonScalarType) -> Option { + fn helper(a: BsonScalarType, b: BsonScalarType) -> Option { + if a == b { + Some(a) + } else if a.is_binary() && b.is_binary() { + Some(S::BinData) + } else { + match (a, b) { + (S::Double, S::Int) => Some(S::Double), + _ => None, + } + } } + helper(a, b).or_else(|| helper(b, a)) + } +} + +impl Serialize for BsonScalarType { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.bson_name()) + } +} + +impl<'de> Deserialize<'de> for BsonScalarType { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + BsonScalarType::from_bson_name(&s).map_err(serde::de::Error::custom) } } @@ -329,4 +401,22 @@ mod tests { assert_eq!(t, BsonType::Scalar(BsonScalarType::Double)); Ok(()) } + + #[test] + fn unifies_double_and_int() { + use BsonScalarType as S; + let t1 = S::common_supertype(S::Double, S::Int); + let t2 = S::common_supertype(S::Int, S::Double); + assert_eq!(t1, Some(S::Double)); + assert_eq!(t2, Some(S::Double)); + } + + #[test] + fn unifies_bin_data_and_uuid() { + use BsonScalarType as S; + let t1 = S::common_supertype(S::BinData, S::UUID); + let t2 = S::common_supertype(S::UUID, S::BinData); + assert_eq!(t1, Some(S::BinData)); + assert_eq!(t2, Some(S::BinData)); + } } diff --git a/crates/test-helpers/src/arb_bson.rs b/crates/test-helpers/src/arb_bson.rs index 295e91c6..066d4027 100644 --- a/crates/test-helpers/src/arb_bson.rs +++ b/crates/test-helpers/src/arb_bson.rs @@ -1,7 +1,7 @@ use std::time::SystemTime; -use mongodb::bson::{self, oid::ObjectId, Bson}; -use proptest::{collection, prelude::*, sample::SizeRange}; +use mongodb::bson::{self, oid::ObjectId, spec::BinarySubtype, Binary, Bson}; +use proptest::{array, 
collection, prelude::*, sample::SizeRange}; pub fn arb_bson() -> impl Strategy { arb_bson_with_options(Default::default()) @@ -56,6 +56,7 @@ pub fn arb_bson_with_options(options: ArbBsonOptions) -> impl Strategy(), any::()) .prop_map(|(time, increment)| Bson::Timestamp(bson::Timestamp { time, increment })), arb_binary().prop_map(Bson::Binary), + arb_uuid().prop_map(Bson::Binary), (".*", "i?l?m?s?u?x?").prop_map(|(pattern, options)| Bson::RegularExpression( bson::Regex { pattern, options } )), @@ -120,8 +121,21 @@ fn arb_bson_document_recursive( fn arb_binary() -> impl Strategy { let binary_subtype = any::().prop_map(Into::into); - let bytes = collection::vec(any::(), 1..256); - (binary_subtype, bytes).prop_map(|(subtype, bytes)| bson::Binary { subtype, bytes }) + binary_subtype.prop_flat_map(|subtype| { + let bytes = match subtype { + BinarySubtype::Uuid => array::uniform16(any::()).prop_map_into().boxed(), + _ => collection::vec(any::(), 1..256).boxed(), + }; + bytes.prop_map(move |bytes| Binary { subtype, bytes }) + }) +} + +fn arb_uuid() -> impl Strategy { + let bytes = array::uniform16(any::()); + bytes.prop_map(|bytes| { + let uuid = bson::Uuid::from_bytes(bytes); + bson::Binary::from_uuid(uuid) + }) } pub fn arb_datetime() -> impl Strategy { diff --git a/fixtures/hasura/README.md b/fixtures/hasura/README.md index a1ab7b15..814f1d9b 100644 --- a/fixtures/hasura/README.md +++ b/fixtures/hasura/README.md @@ -32,11 +32,11 @@ this repo. The plugin binary is provided by the Nix dev shell. Use these commands: ```sh -$ nix run .#mongodb-cli-plugin -- --connection-uri mongodb://localhost/sample_mflix --context-path sample_mflix/connector/ update +$ nix run .#mongodb-cli-plugin -- --connection-uri mongodb://localhost/sample_mflix --context-path app/connector/sample_mflix/ update -$ nix run .#mongodb-cli-plugin -- --connection-uri mongodb://localhost/chinook --context-path chinook/connector/ update +$ nix run .#mongodb-cli-plugin -- --connection-uri mongodb://localhost/chinook --context-path app/connector/chinook/ update -$ nix run .#mongodb-cli-plugin -- --connection-uri mongodb://localhost/test_cases --context-path test_cases/connector/ update +$ nix run .#mongodb-cli-plugin -- --connection-uri mongodb://localhost/test_cases --context-path app/connector/test_cases/ update ``` Update Hasura metadata based on connector configuration diff --git a/fixtures/hasura/app/connector/test_cases/schema/uuids.json b/fixtures/hasura/app/connector/test_cases/schema/uuids.json new file mode 100644 index 00000000..42a0dd4d --- /dev/null +++ b/fixtures/hasura/app/connector/test_cases/schema/uuids.json @@ -0,0 +1,34 @@ +{ + "name": "uuids", + "collections": { + "uuids": { + "type": "uuids" + } + }, + "objectTypes": { + "uuids": { + "fields": { + "_id": { + "type": { + "scalar": "objectId" + } + }, + "name": { + "type": { + "scalar": "string" + } + }, + "uuid": { + "type": { + "scalar": "uuid" + } + }, + "uuid_as_string": { + "type": { + "scalar": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/mongodb/test_cases/import.sh b/fixtures/mongodb/test_cases/import.sh index 6f647970..9d512a9a 100755 --- a/fixtures/mongodb/test_cases/import.sh +++ b/fixtures/mongodb/test_cases/import.sh @@ -14,5 +14,6 @@ echo "📡 Importing test case data..." 
mongoimport --db test_cases --collection weird_field_names --file "$FIXTURES"/weird_field_names.json mongoimport --db test_cases --collection nested_collection --file "$FIXTURES"/nested_collection.json mongoimport --db test_cases --collection nested_field_with_dollar --file "$FIXTURES"/nested_field_with_dollar.json +mongoimport --db test_cases --collection uuids --file "$FIXTURES"/uuids.json echo "✅ test case data imported..." diff --git a/fixtures/mongodb/test_cases/uuids.json b/fixtures/mongodb/test_cases/uuids.json new file mode 100644 index 00000000..16d6aade --- /dev/null +++ b/fixtures/mongodb/test_cases/uuids.json @@ -0,0 +1,4 @@ +{ "_id": { "$oid": "67c1fc84d5c3213534bdce10" }, "uuid": { "$binary": { "base64": "+gpObj88QmaOlr9rXJurAQ==", "subType":"04" } }, "uuid_as_string": "fa0a4e6e-3f3c-4266-8e96-bf6b5c9bab01", "name": "brassavola nodosa" } +{ "_id": { "$oid": "67c1fc84d5c3213534bdce11" }, "uuid": { "$binary": { "base64": "QKaT0MAKQl2vXFNeN/3+nA==", "subType":"04" } }, "uuid_as_string": "40a693d0-c00a-425d-af5c-535e37fdfe9c", "name": "peristeria elata" } +{ "_id": { "$oid": "67c1fc84d5c3213534bdce12" }, "uuid": { "$binary": { "base64": "CsKZiCoHTfWn7lckxrpD+Q==", "subType":"04" } }, "uuid_as_string": "0ac29988-2a07-4df5-a7ee-5724c6ba43f9", "name": "vanda coerulea" } +{ "_id": { "$oid": "67c1fc84d5c3213534bdce13" }, "uuid": { "$binary": { "base64": "BBBI52lNSUCHBlF/QKW9Vw==", "subType":"04" } }, "uuid_as_string": "041048e7-694d-4940-8706-517f40a5bd57", "name": "tuberous grasspink" }
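For completeness, the supertype rules introduced in `bson_type.rs` can be exercised directly through the new `BsonScalarType::common_supertype` helper. A small sketch, mirroring the unit tests added above, of the behavior the introspection type unification now relies on:

```rust
use mongodb_support::BsonScalarType as S;

fn main() {
    // Identical types unify to themselves.
    assert_eq!(S::common_supertype(S::UUID, S::UUID), Some(S::UUID));
    // uuid is a binary subtype, so it unifies with binData as binData.
    assert_eq!(S::common_supertype(S::UUID, S::BinData), Some(S::BinData));
    // The pre-existing numeric rule is unchanged: int and double unify to double.
    assert_eq!(S::common_supertype(S::Int, S::Double), Some(S::Double));
    // Anything else has no common supertype; callers fall back to ExtendedJSON.
    assert_eq!(S::common_supertype(S::String, S::Int), None);
}
```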