From f3e32caefa015aa0ec5a2f53316a1e81592d2c5f Mon Sep 17 00:00:00 2001 From: David Overton Date: Wed, 13 Mar 2024 11:41:28 +1100 Subject: [PATCH 01/12] Move branch dmoverton/cli-plugin from the old repository --- Cargo.toml | 3 +- connector-definition/Makefile | 21 +++++ connector-definition/connector-metadata.yaml | 15 ++++ crates/cli/Cargo.toml | 21 +++++ crates/cli/src/lib.rs | 75 ++++++++++++++++ crates/cli/src/main.rs | 54 +++++++++++ crates/configuration/src/configuration.rs | 4 +- crates/configuration/src/lib.rs | 1 + crates/configuration/src/metadata/database.rs | 10 +-- crates/configuration/src/metadata/mod.rs | 4 +- crates/configuration/src/read_directory.rs | 23 +++++ .../src/api_type_conversions/configuration.rs | 89 +++++++++++++++++++ .../src/api_type_conversions/mod.rs | 2 + crates/mongodb-connector/src/lib.rs | 6 ++ crates/mongodb-connector/src/state.rs | 6 +- crates/mongodb-support/src/bson_type.rs | 4 +- 16 files changed, 325 insertions(+), 13 deletions(-) create mode 100644 connector-definition/Makefile create mode 100644 connector-definition/connector-metadata.yaml create mode 100644 crates/cli/Cargo.toml create mode 100644 crates/cli/src/lib.rs create mode 100644 crates/cli/src/main.rs create mode 100644 crates/mongodb-connector/src/api_type_conversions/configuration.rs create mode 100644 crates/mongodb-connector/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index fd9e7429..5c9e06e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,8 @@ members = [ "crates/dc-api", "crates/dc-api-types", "crates/dc-api-test-helpers", - "crates/ndc-test-helpers" + "crates/ndc-test-helpers", + "crates/cli" ] resolver = "2" diff --git a/connector-definition/Makefile b/connector-definition/Makefile new file mode 100644 index 00000000..d6744a88 --- /dev/null +++ b/connector-definition/Makefile @@ -0,0 +1,21 @@ +.DEFAULT_GOAL := build +SHELL = /usr/bin/env bash + +.PHONY: build +build: dist/connector-definition.tgz + +.PHONY: clean +clean: + rm -rf dist + +dist dist/.hasura-connector: + mkdir dist + mkdir dist/.hasura-connector + +dist/.hasura-connector/connector-metadata.yaml: DOCKER_IMAGE ?= $(error The DOCKER_IMAGE variable must be defined) +dist/.hasura-connector/connector-metadata.yaml: connector-metadata.yaml dist/.hasura-connector + cp -f connector-metadata.yaml dist/.hasura-connector/ + yq -i '.packagingDefinition.dockerImage = "$(DOCKER_IMAGE)"' dist/.hasura-connector/connector-metadata.yaml + +dist/connector-definition.tgz: dist/.hasura-connector/connector-metadata.yaml + shopt -s dotglob && cd dist && tar -czvf connector-definition.tgz * \ No newline at end of file diff --git a/connector-definition/connector-metadata.yaml b/connector-definition/connector-metadata.yaml new file mode 100644 index 00000000..833db913 --- /dev/null +++ b/connector-definition/connector-metadata.yaml @@ -0,0 +1,15 @@ +packagingDefinition: + type: PrebuiltDockerImage + dockerImage: +supportedEnvironmentVariables: + - name: MONGODB_DATABASE_URI + description: The URI for the MongoDB database +commands: + update: hasura-mongodb update +cliPlugin: + name: hasura-mongodb + version: "0.0.1" +dockerComposeWatch: + - path: ./ + target: /etc/connector + action: sync+restart \ No newline at end of file diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml new file mode 100644 index 00000000..82a71388 --- /dev/null +++ b/crates/cli/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "mongodb-cli-plugin" +edition = "2021" +version.workspace = true + +[[bin]] +name = "hasura-mongodb" +path = "./src/main.rs" + 
+[dependencies] +configuration = { path = "../configuration" } +mongodb-agent-v3 = { path = "../mongodb-agent-v3" } +mongodb-agent-common = { path = "../mongodb-agent-common" } +mongodb = "2.8" +ndc-sdk = { git = "https://github.com/hasura/ndc-hub.git" } + +anyhow = "1.0.80" +clap = { version = "4.5.1", features = ["derive", "env"] } +serde_json = { version = "1.0.113", features = ["raw_value"] } +thiserror = "1.0.57" +tokio = { version = "1.36.0", features = ["full"] } diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs new file mode 100644 index 00000000..e40d88a3 --- /dev/null +++ b/crates/cli/src/lib.rs @@ -0,0 +1,75 @@ +//! The interpretation of the commands that the CLI can handle. +//! +//! The CLI can do a few things. This provides a central point where those things are routed and +//! then done, making it easier to test this crate deterministically. + +use std::path::PathBuf; + +use clap::Subcommand; + +use mongodb_agent_common::{interface_types::MongoConfig, schema::get_schema}; +use configuration::Configuration; + +/// The command invoked by the user. +#[derive(Debug, Clone, Subcommand)] +pub enum Command { + /// Initialize a configuration in the current (empty) directory. + // Initialize, + /// Update the configuration by introspecting the database, using the configuration options. + Update, +} + +/// The set of errors that can go wrong _in addition to_ generic I/O or parsing errors. +#[derive(Debug, PartialEq, thiserror::Error)] +pub enum Error { + #[error("directory is not empty")] + DirectoryIsNotEmpty, +} + +pub struct Context { + pub path: PathBuf, + pub mongo_config: MongoConfig, +} + +/// Run a command in a given directory. +pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> { + match command { + // Command::Initialize => initialize(context_path)?, + Command::Update => update(context).await?, + }; + Ok(()) +} + + +/// Initialize an empty directory with an empty connector configuration. +/// +/// An empty configuration contains default settings and options, and is expected to be filled with +/// information such as the database connection string by the user, and later on metadata +/// information via introspection. +// fn initialize(context_path: &Path) -> anyhow::Result<()> { +// let configuration_file = context_path.join(CONFIGURATION_FILENAME); +// fs::create_dir_all(context_path)?; + +// // refuse to initialize the directory unless it is empty +// let mut items_in_dir = fs::read_dir(context_path)?; +// if items_in_dir.next().is_some() { +// Err(Error::DirectoryIsNotEmpty)?; +// } + +// let _writer = fs::File::create(configuration_file)?; +// // serde_json::to_writer_pretty(writer, &MongoConnector::make_empty_configuration())?; +// Ok(()) +// } + +/// Update the configuration in the current directory by introspecting the database. +/// +/// This expects a configuration with a valid connection URI. +async fn update(context: &Context) -> anyhow::Result<()> { + // let input_configuration: Configuration = configuration::read_directory(&context.path).await?; + let schema = get_schema(&context.mongo_config).await?; + + // let output_configuration = input_configuration; // XXX TODO: update configuration + // configuration::write_directory(&context.path, &output_configuration).await?; + + Ok(()) +} diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs new file mode 100644 index 00000000..caf0c722 --- /dev/null +++ b/crates/cli/src/main.rs @@ -0,0 +1,54 @@ +//! The CLI application. This is used to configure a deployment of mongo-agent-v3. 
+//! +//! This is intended to be automatically downloaded and invoked via the Hasura CLI, as a plugin. +//! It is unlikely that end-users will use it directly. + +use anyhow::anyhow; +use std::env; +use std::path::PathBuf; + +use clap::Parser; +use mongodb_agent_v3::state::{try_init_state_from_uri, DATABASE_URI_ENV_VAR}; +use mongodb_cli_plugin::{run, Command, Context}; + +/// The command-line arguments. +#[derive(Debug, Parser)] +pub struct Args { + /// The path to the configuration. Defaults to the current directory. + #[arg( + long = "context", + env = "HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH", + value_name = "DIRECTORY" + )] + pub context_path: Option, + + #[arg( + long = "connection-uri", + env = DATABASE_URI_ENV_VAR, + required = true, + value_name = "URI" + )] + pub connection_uri: String, + + /// The command to invoke. + #[command(subcommand)] + pub subcommand: Command, +} + +/// The application entrypoint. It pulls information from the environment and then calls the [run] +/// function. The library remains unaware of the environment, so that we can more easily test it. +#[tokio::main] +pub async fn main() -> anyhow::Result<()> { + let args = Args::parse(); + // Default the context path to the current directory. + let path = match args.context_path { + Some(path) => path, + None => env::current_dir()?, + }; + let mongo_config = try_init_state_from_uri(&args.connection_uri) + .await + .map_err(|e| anyhow!("Error initializing MongoDB state {}", e))?; + let context = Context { path, mongo_config }; + run(args.subcommand, &context).await?; + Ok(()) +} diff --git a/crates/configuration/src/configuration.rs b/crates/configuration/src/configuration.rs index c38671b1..625ec509 100644 --- a/crates/configuration/src/configuration.rs +++ b/crates/configuration/src/configuration.rs @@ -1,11 +1,11 @@ use std::{io, path::Path}; use schemars::JsonSchema; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use crate::{read_directory, Metadata}; -#[derive(Clone, Debug, Default, Deserialize, JsonSchema)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct Configuration { pub metadata: Metadata, diff --git a/crates/configuration/src/lib.rs b/crates/configuration/src/lib.rs index ba88399d..d75c6d17 100644 --- a/crates/configuration/src/lib.rs +++ b/crates/configuration/src/lib.rs @@ -5,3 +5,4 @@ mod read_directory; pub use crate::configuration::Configuration; pub use crate::metadata::Metadata; pub use crate::read_directory::read_directory; +pub use crate::read_directory::write_directory; diff --git a/crates/configuration/src/metadata/database.rs b/crates/configuration/src/metadata/database.rs index 8ea09ef4..c82942e5 100644 --- a/crates/configuration/src/metadata/database.rs +++ b/crates/configuration/src/metadata/database.rs @@ -1,9 +1,9 @@ use schemars::JsonSchema; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use mongodb_support::BsonScalarType; -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct Collection { pub name: String, @@ -15,7 +15,7 @@ pub struct Collection { } /// The type of values that a column, field, or argument may take. 
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub enum Type { /// One of the predefined BSON scalar types @@ -27,7 +27,7 @@ pub enum Type { Nullable(Box), } -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct ObjectType { pub name: String, @@ -37,7 +37,7 @@ pub struct ObjectType { } /// Information about an object type field. -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct ObjectField { pub name: String, diff --git a/crates/configuration/src/metadata/mod.rs b/crates/configuration/src/metadata/mod.rs index 28751944..6326d2e9 100644 --- a/crates/configuration/src/metadata/mod.rs +++ b/crates/configuration/src/metadata/mod.rs @@ -1,11 +1,11 @@ mod database; use schemars::JsonSchema; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; pub use self::database::{Collection, ObjectField, ObjectType, Type}; -#[derive(Clone, Debug, Default, Deserialize, JsonSchema)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct Metadata { #[serde(default)] diff --git a/crates/configuration/src/read_directory.rs b/crates/configuration/src/read_directory.rs index 3a72c5a6..0af1f8fa 100644 --- a/crates/configuration/src/read_directory.rs +++ b/crates/configuration/src/read_directory.rs @@ -11,6 +11,7 @@ use crate::Configuration; pub const CONFIGURATION_FILENAME: &str = "configuration"; pub const CONFIGURATION_EXTENSIONS: [(&str, FileFormat); 3] = [("json", JSON), ("yaml", YAML), ("yml", YAML)]; +pub const DEFAULT_EXTENSION: &str = "json"; #[derive(Clone, Copy, Debug)] pub enum FileFormat { @@ -80,3 +81,25 @@ where }; Ok(value) } + +pub async fn write_directory( + configuration_dir: impl AsRef, + configuration: &Configuration, +) -> io::Result<()> { + write_file(configuration_dir, CONFIGURATION_FILENAME, configuration).await +} + +fn default_file_path(configuration_dir: impl AsRef, basename: &str) -> PathBuf { + let dir = configuration_dir.as_ref(); + dir.join(format!("{basename}.{DEFAULT_EXTENSION}")) +} + +async fn write_file( + configuration_dir: impl AsRef, + basename: &str, + configuration: &Configuration, +) -> io::Result<()> { + let path = default_file_path(configuration_dir, basename); + let bytes = serde_json::to_vec_pretty(configuration)?; + fs::write(path, bytes).await +} diff --git a/crates/mongodb-connector/src/api_type_conversions/configuration.rs b/crates/mongodb-connector/src/api_type_conversions/configuration.rs new file mode 100644 index 00000000..0b9e5705 --- /dev/null +++ b/crates/mongodb-connector/src/api_type_conversions/configuration.rs @@ -0,0 +1,89 @@ +use configuration::{metadata::{Collection, ObjectField, ObjectType}, Configuration}; + +pub fn v2_schema_response_to_configuration( + response: dc_api_types::SchemaResponse, +) -> Configuration { + let metadata = v2_schema_response_to_metadata(response); + Configuration { metadata } +} + +fn v2_schema_response_to_metadata( + response: dc_api_types::SchemaResponse, +) -> configuration::Metadata { + let table_object_types = response.tables.iter().map(table_to_object_type); + let nested_object_types = + response + .object_types + .into_iter() + .map(|ot| ObjectType { + name: 
ot.name.to_string(), + description: ot.description, + fields: ot + .columns + .into_iter() + .map(column_info_to_object_field) + .collect(), + }); + let object_types = table_object_types.chain(nested_object_types).collect(); + + let collections = response + .tables + .into_iter() + .map(|table| table_to_collection(table)) + .collect(); + + configuration::Metadata { + collections, + object_types, + } +} + +fn column_info_to_object_field(column_info: dc_api_types::ColumnInfo) -> ObjectField { + let t = v2_to_v3_column_type(column_info.r#type); + let is_nullable = column_info.nullable; + ObjectField { + name: column_info.name, + description: column_info.description.flatten(), + r#type: maybe_nullable(t, is_nullable), + } +} + +fn maybe_nullable(t: configuration::metadata::Type, is_nullable: bool) -> configuration::metadata::Type { + todo!() +} + +fn v2_to_v3_column_type(r#type: dc_api_types::ColumnType) -> configuration::metadata::Type { + todo!() +} + +fn table_to_object_type(table: &dc_api_types::TableInfo) -> ObjectType { + let fields = table + .columns + .iter() + .map(|column_info| column_info_to_object_field(column_info.clone())) + .collect(); + ObjectType { + name: collection_type_name_from_table_name(table.name.clone()), + description: table.description.clone().flatten(), + fields, + } +} + +fn collection_type_name_from_table_name(clone: Vec) -> String { + todo!() +} + +fn table_to_collection( + table: dc_api_types::TableInfo, +) -> Collection { + let collection_type = collection_type_name_from_table_name(table.name.clone()); + Collection { + name: name_from_qualified_name(table.name), + description: table.description.flatten(), + r#type: todo!(), + } +} + +fn name_from_qualified_name(name: Vec) -> String { + todo!() +} diff --git a/crates/mongodb-connector/src/api_type_conversions/mod.rs b/crates/mongodb-connector/src/api_type_conversions/mod.rs index deb1d029..934324ae 100644 --- a/crates/mongodb-connector/src/api_type_conversions/mod.rs +++ b/crates/mongodb-connector/src/api_type_conversions/mod.rs @@ -1,4 +1,5 @@ mod capabilities; +mod configuration; mod conversion_error; mod helpers; mod json_response; @@ -9,6 +10,7 @@ mod query_traversal; #[allow(unused_imports)] pub use self::{ capabilities::v2_to_v3_scalar_type_capabilities, + configuration::v2_schema_response_to_configuration, conversion_error::ConversionError, json_response::map_unserialized, query_request::{v3_to_v2_query_request, QueryContext}, diff --git a/crates/mongodb-connector/src/lib.rs b/crates/mongodb-connector/src/lib.rs new file mode 100644 index 00000000..dd960b3b --- /dev/null +++ b/crates/mongodb-connector/src/lib.rs @@ -0,0 +1,6 @@ +pub mod api_type_conversions; +pub mod capabilities; +pub mod error_mapping; +pub mod mongo_connector; +pub mod schema; +pub mod state; diff --git a/crates/mongodb-connector/src/state.rs b/crates/mongodb-connector/src/state.rs index 912bcd96..faca22c6 100644 --- a/crates/mongodb-connector/src/state.rs +++ b/crates/mongodb-connector/src/state.rs @@ -9,7 +9,11 @@ pub const DATABASE_URI_ENV_VAR: &str = "MONGODB_DATABASE_URI"; pub async fn try_init_state() -> Result> { // Splitting this out of the `Connector` impl makes error translation easier let database_uri = env::var(DATABASE_URI_ENV_VAR)?; - let client = get_mongodb_client(&database_uri).await?; + try_init_state_from_uri(&database_uri).await +} + +pub async fn try_init_state_from_uri(database_uri: &str) -> Result> { + let client = get_mongodb_client(database_uri).await?; let database_name = match client.default_database() { 
Some(database) => Ok(database.name().to_owned()), None => Err(anyhow!( diff --git a/crates/mongodb-support/src/bson_type.rs b/crates/mongodb-support/src/bson_type.rs index efeae0ac..5f948553 100644 --- a/crates/mongodb-support/src/bson_type.rs +++ b/crates/mongodb-support/src/bson_type.rs @@ -1,7 +1,7 @@ use dc_api_types::GraphQlType; use enum_iterator::{all, Sequence}; use schemars::JsonSchema; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use crate::error::Error; @@ -80,7 +80,7 @@ impl<'de> Deserialize<'de> for BsonType { } } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Sequence, Deserialize, JsonSchema)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Sequence, Serialize, Deserialize, JsonSchema)] #[serde(try_from = "BsonType", rename_all = "camelCase")] pub enum BsonScalarType { // numeric From 34a3f2da9b314c3b895e9047df13b87e30783f86 Mon Sep 17 00:00:00 2001 From: David Overton Date: Wed, 13 Mar 2024 11:44:48 +1100 Subject: [PATCH 02/12] Fix build after patch apply --- Cargo.lock | 152 +++++++++++++++++++++++------------------ crates/cli/Cargo.toml | 2 +- crates/cli/src/main.rs | 2 +- 3 files changed, 89 insertions(+), 67 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dbce74e4..9e30612e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -56,9 +56,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.4" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" dependencies = [ "anstyle", "anstyle-parse", @@ -104,9 +104,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.75" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "async-trait" @@ -116,7 +116,7 @@ checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -351,9 +351,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.10" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fffed7514f420abec6d183b1d3acfd9099c79c3a10a06ade4f8203f1411272" +checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651" dependencies = [ "clap_builder", "clap_derive", @@ -361,33 +361,33 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.9" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63361bae7eef3771745f02d8d892bec2fee5f6e34af316ba556e7f97a7069ff1" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim", + "strsim 0.11.0", ] [[package]] name = "clap_derive" -version = "4.4.7" +version = "4.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] name = "clap_lex" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "colorchoice" @@ -519,7 +519,7 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim", + "strsim 0.10.0", "syn 1.0.109", ] @@ -546,8 +546,8 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim", - "syn 2.0.38", + "strsim 0.10.0", + "syn 2.0.52", ] [[package]] @@ -580,7 +580,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -744,7 +744,7 @@ checksum = "eecf8589574ce9b895052fa12d69af7a233f99e6107f5cb8dd1044f2a17bfdcb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -882,7 +882,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -920,7 +920,7 @@ name = "gdc_rust_types" version = "1.0.2" source = "git+https://github.com/hasura/gdc_rust_types.git?rev=3273434#3273434068400f836cf12ea08c514505446821cb" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.5", "openapiv3", "serde", "serde-enum-str", @@ -967,7 +967,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.1.0", + "indexmap 2.2.5", "slab", "tokio", "tokio-util", @@ -1200,9 +1200,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" dependencies = [ "equivalent", "hashbrown 0.14.1", @@ -1479,7 +1479,7 @@ dependencies = [ "sha2", "socket2 0.4.9", "stringprep", - "strsim", + "strsim 0.10.0", "take_mut", "thiserror", "tokio", @@ -1524,6 +1524,22 @@ dependencies = [ "tracing", ] +[[package]] +name = "mongodb-cli-plugin" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "configuration", + "mongodb", + "mongodb-agent-common", + "mongodb-connector", + "ndc-sdk", + "serde_json", + "thiserror", + "tokio", +] + [[package]] name = "mongodb-connector" version = "0.1.0" @@ -1536,7 +1552,7 @@ dependencies = [ "dc-api-types", "enum-iterator", "http", - "indexmap 2.1.0", + "indexmap 2.2.5", "itertools 0.10.5", "mongodb", "mongodb-agent-common", @@ -1588,7 +1604,7 @@ version = "0.1.0" source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.1.0-rc.18#46ef35891198840a21653738cb386f97b069f56f" dependencies = [ "async-trait", - "indexmap 2.1.0", + "indexmap 2.2.5", "opentelemetry", "reqwest", "schemars", @@ -1611,7 +1627,7 @@ dependencies = [ "clap", "gdc_rust_types", "http", - "indexmap 2.1.0", + "indexmap 2.2.5", "mime", "ndc-client", "ndc-test", @@ -1642,7 +1658,7 @@ dependencies = [ "async-trait", "clap", "colored", - "indexmap 2.1.0", + "indexmap 2.2.5", "ndc-client", "proptest", "reqwest", @@ -1657,7 +1673,7 @@ dependencies = [ name = "ndc-test-helpers" version = "0.1.0" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.5", "itertools 0.10.5", "ndc-sdk", "serde_json", @@ -1749,7 +1765,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75e56d5c441965b6425165b7e3223cc933ca469834f4a8b4786817a1f9dc4f13" dependencies = [ - "indexmap 2.1.0", + "indexmap 1.9.3", "serde", "serde_json", ] @@ -1777,7 +1793,7 @@ checksum = 
"a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -1981,7 +1997,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -2056,9 +2072,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -2135,9 +2151,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.33" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -2440,7 +2456,7 @@ checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" dependencies = [ "dyn-clone", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.2.5", "schemars_derive", "serde", "serde_json", @@ -2521,9 +2537,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.193" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] @@ -2569,13 +2585,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.193" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -2591,11 +2607,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.5", "itoa", "ryu", "serde", @@ -2659,7 +2675,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.2.5", "serde", "serde_json", "serde_with_macros 3.4.0", @@ -2687,7 +2703,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -2699,7 +2715,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -2708,7 +2724,7 @@ version = "0.9.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a15e0ef66bf939a7c890a0bf6d5a733c70202225f9888a89ed5c62298b019129" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.5", "itoa", "ryu", "serde", @@ -2845,6 +2861,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "strsim" +version = 
"0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" + [[package]] name = "subtle" version = "2.5.0" @@ -2864,9 +2886,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.38" +version = "2.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" dependencies = [ "proc-macro2", "quote", @@ -2933,22 +2955,22 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -3007,9 +3029,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.34.0" +version = "1.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" dependencies = [ "backtrace", "bytes", @@ -3042,7 +3064,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -3191,7 +3213,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] [[package]] @@ -3470,7 +3492,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", "wasm-bindgen-shared", ] @@ -3504,7 +3526,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3689,5 +3711,5 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.52", ] diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 82a71388..3389960f 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -9,7 +9,7 @@ path = "./src/main.rs" [dependencies] configuration = { path = "../configuration" } -mongodb-agent-v3 = { path = "../mongodb-agent-v3" } +mongodb-connector = { path = "../mongodb-connector" } mongodb-agent-common = { path = "../mongodb-agent-common" } mongodb = "2.8" ndc-sdk = { git = "https://github.com/hasura/ndc-hub.git" } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index caf0c722..b129eaf0 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -8,7 +8,7 @@ use std::env; use std::path::PathBuf; use clap::Parser; -use mongodb_agent_v3::state::{try_init_state_from_uri, DATABASE_URI_ENV_VAR}; +use 
mongodb_connector::state::{try_init_state_from_uri, DATABASE_URI_ENV_VAR}; use mongodb_cli_plugin::{run, Command, Context}; /// The command-line arguments. From 6472461ab0da467bab7dac8e4899ae8d16b8a75c Mon Sep 17 00:00:00 2001 From: Jesse Hallett Date: Wed, 13 Mar 2024 12:19:22 -0700 Subject: [PATCH 03/12] nix configuration for CLI plugin --- flake.nix | 11 +++++++++++ nix/docker-cli-plugin.nix | 12 ++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 nix/docker-cli-plugin.nix diff --git a/flake.nix b/flake.nix index 4f35db9b..04b064e7 100644 --- a/flake.nix +++ b/flake.nix @@ -89,6 +89,7 @@ # arion-compose.nix. mongodb-connector-workspace = final.callPackage ./nix/mongodb-connector-workspace.nix { }; # builds all packages in this repo mongodb-connector = final.mongodb-connector-workspace.override { package = "mongodb-connector"; }; # override `package` to build one specific crate + mongodb-cli-plugin = final.mongodb-connector-workspace.override { package = "mongodb-cli-plugin"; }; v3-engine = final.callPackage ./nix/v3-engine.nix { src = v3-engine-source; }; v3-e2e-testing = final.callPackage ./nix/v3-e2e-testing.nix { src = v3-e2e-testing-source; database-to-test = "mongodb"; }; inherit v3-e2e-testing-source; # include this source so we can read files from it in arion-compose configs @@ -170,6 +171,16 @@ architecture = "arm64"; }; + # CLI plugin packages with cross-compilation options + mongodb-cli-plugin = pkgs.mongodb-cli-plugin.override { staticallyLinked = true; }; + mongodb-cli-plugin-x86_64-linux = pkgs.pkgsCross.x86_64-linux.mongodb-cli-plugin.override { staticallyLinked = true; }; + mongodb-cli-plugin-aarch64-linux = pkgs.pkgsCross.aarch64-linux.mongodb-cli-plugin.override { staticallyLinked = true; }; + + # CLI plugin docker images + mongodb-cli-plugin-docker = pkgs.callPackage ./nix/docker-cli-plugin.nix { }; + mongodb-cli-plugin-docker-x86_64-linux = pkgs.pkgsCross.x86_64-linux.callPackage ./nix/docker-cli-plugin.nix { }; + mongodb-cli-plugin-docker-aarch64-linux = pkgs.pkgsCross.aarch64-linux.callPackage ./nix/docker-cli-plugin.nix { }; + publish-docker-image = pkgs.writeShellApplication { name = "publish-docker-image"; runtimeInputs = with pkgs; [ coreutils skopeo ]; diff --git a/nix/docker-cli-plugin.nix b/nix/docker-cli-plugin.nix new file mode 100644 index 00000000..e28ae550 --- /dev/null +++ b/nix/docker-cli-plugin.nix @@ -0,0 +1,12 @@ +{ name ? 
"ghcr.io/hasura/mongodb-cli-plugin" +, mongodb-cli-plugin +, dockerTools +}: + +dockerTools.buildLayeredImage { + inherit name; + created = "now"; + config = { + Entrypoint = [ "${mongodb-cli-plugin}/bin/hasura-mongodb" ]; + }; +} From fb2adbe2ff12132744b56e3c27bc907651129485 Mon Sep 17 00:00:00 2001 From: David Overton Date: Thu, 14 Mar 2024 09:13:29 +1100 Subject: [PATCH 04/12] Finish implementing v2_schema_response_to_configuration --- Cargo.lock | 1 + crates/cli/src/lib.rs | 9 +- crates/mongodb-connector/Cargo.toml | 1 + .../src/api_type_conversions/configuration.rs | 85 ++++++++++++------- 4 files changed, 60 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9e30612e..c61788dc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1556,6 +1556,7 @@ dependencies = [ "itertools 0.10.5", "mongodb", "mongodb-agent-common", + "mongodb-support", "ndc-sdk", "ndc-test-helpers", "pretty_assertions", diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index e40d88a3..f2ef85fc 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -8,7 +8,7 @@ use std::path::PathBuf; use clap::Subcommand; use mongodb_agent_common::{interface_types::MongoConfig, schema::get_schema}; -use configuration::Configuration; +use mongodb_connector::api_type_conversions::v2_schema_response_to_configuration; /// The command invoked by the user. #[derive(Debug, Clone, Subcommand)] @@ -40,7 +40,6 @@ pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> { Ok(()) } - /// Initialize an empty directory with an empty connector configuration. /// /// An empty configuration contains default settings and options, and is expected to be filled with @@ -65,11 +64,11 @@ pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> { /// /// This expects a configuration with a valid connection URI. 
async fn update(context: &Context) -> anyhow::Result<()> { - // let input_configuration: Configuration = configuration::read_directory(&context.path).await?; let schema = get_schema(&context.mongo_config).await?; - // let output_configuration = input_configuration; // XXX TODO: update configuration - // configuration::write_directory(&context.path, &output_configuration).await?; + let configuration = v2_schema_response_to_configuration(schema); + + configuration::write_directory(&context.path, &configuration).await?; Ok(()) } diff --git a/crates/mongodb-connector/Cargo.toml b/crates/mongodb-connector/Cargo.toml index 8c3b63cc..0632b67c 100644 --- a/crates/mongodb-connector/Cargo.toml +++ b/crates/mongodb-connector/Cargo.toml @@ -15,6 +15,7 @@ indexmap = { version = "2.1.0", features = ["serde"] } itertools = "^0.10" mongodb = "2.8" mongodb-agent-common = { path = "../mongodb-agent-common" } +mongodb-support = { path = "../mongodb-support" } ndc-sdk = { git = "https://github.com/hasura/ndc-hub.git" } prometheus = "*" # share version from ndc-sdk serde = { version = "1.0", features = ["derive"] } diff --git a/crates/mongodb-connector/src/api_type_conversions/configuration.rs b/crates/mongodb-connector/src/api_type_conversions/configuration.rs index 0b9e5705..80bf46a1 100644 --- a/crates/mongodb-connector/src/api_type_conversions/configuration.rs +++ b/crates/mongodb-connector/src/api_type_conversions/configuration.rs @@ -1,4 +1,7 @@ -use configuration::{metadata::{Collection, ObjectField, ObjectType}, Configuration}; +use configuration::{ + metadata::{Collection, ObjectField, ObjectType}, + Configuration, +}; pub fn v2_schema_response_to_configuration( response: dc_api_types::SchemaResponse, @@ -11,19 +14,15 @@ fn v2_schema_response_to_metadata( response: dc_api_types::SchemaResponse, ) -> configuration::Metadata { let table_object_types = response.tables.iter().map(table_to_object_type); - let nested_object_types = - response - .object_types + let nested_object_types = response.object_types.into_iter().map(|ot| ObjectType { + name: ot.name.to_string(), + description: ot.description, + fields: ot + .columns .into_iter() - .map(|ot| ObjectType { - name: ot.name.to_string(), - description: ot.description, - fields: ot - .columns - .into_iter() - .map(column_info_to_object_field) - .collect(), - }); + .map(column_info_to_object_field) + .collect(), + }); let object_types = table_object_types.chain(nested_object_types).collect(); let collections = response @@ -41,19 +40,41 @@ fn v2_schema_response_to_metadata( fn column_info_to_object_field(column_info: dc_api_types::ColumnInfo) -> ObjectField { let t = v2_to_v3_column_type(column_info.r#type); let is_nullable = column_info.nullable; - ObjectField { - name: column_info.name, - description: column_info.description.flatten(), - r#type: maybe_nullable(t, is_nullable), - } + ObjectField { + name: column_info.name, + description: column_info.description.flatten(), + r#type: maybe_nullable(t, is_nullable), + } } -fn maybe_nullable(t: configuration::metadata::Type, is_nullable: bool) -> configuration::metadata::Type { - todo!() +fn maybe_nullable( + t: configuration::metadata::Type, + is_nullable: bool, +) -> configuration::metadata::Type { + if is_nullable { + configuration::metadata::Type::Nullable(Box::new(t)) + } else { + t + } } -fn v2_to_v3_column_type(r#type: dc_api_types::ColumnType) -> configuration::metadata::Type { - todo!() +fn v2_to_v3_column_type(t: dc_api_types::ColumnType) -> configuration::metadata::Type { + match t { + 
dc_api_types::ColumnType::Scalar(name) => { + let bson_scalar_type = mongodb_support::BsonScalarType::from_bson_name(&name).unwrap(); // XXX TODO: handle error + configuration::metadata::Type::Scalar(bson_scalar_type) + } + dc_api_types::ColumnType::Object(name) => { + configuration::metadata::Type::Object(name.to_string()) + } + dc_api_types::ColumnType::Array { + element_type, + nullable, + } => configuration::metadata::Type::ArrayOf(Box::new(maybe_nullable( + v2_to_v3_column_type(*element_type), + nullable, + ))), + } } fn table_to_object_type(table: &dc_api_types::TableInfo) -> ObjectType { @@ -69,21 +90,23 @@ fn table_to_object_type(table: &dc_api_types::TableInfo) -> ObjectType { } } -fn collection_type_name_from_table_name(clone: Vec) -> String { - todo!() +fn collection_type_name_from_table_name(table_name: Vec) -> String { + name_from_qualified_name(table_name) } -fn table_to_collection( - table: dc_api_types::TableInfo, -) -> Collection { +fn table_to_collection(table: dc_api_types::TableInfo) -> Collection { let collection_type = collection_type_name_from_table_name(table.name.clone()); Collection { name: name_from_qualified_name(table.name), description: table.description.flatten(), - r#type: todo!(), - } + r#type: collection_type, + } } -fn name_from_qualified_name(name: Vec) -> String { - todo!() +// TODO: handle qualified names +pub fn name_from_qualified_name(qualified_name: Vec) -> String { + qualified_name + .into_iter() + .last() + .expect("qualified name vec is not empty") } From dc0774481b4786910e8e0b074663534aee0e776a Mon Sep 17 00:00:00 2001 From: David Overton Date: Thu, 14 Mar 2024 13:57:04 +1100 Subject: [PATCH 05/12] Move state module to common crate to avoid cli dependency on connector --- Cargo.lock | 1 - crates/cli/Cargo.toml | 1 - crates/cli/src/lib.rs | 8 +++++--- crates/cli/src/main.rs | 2 +- crates/configuration/src/read_directory.rs | 2 +- crates/mongodb-agent-common/src/lib.rs | 1 + .../src/state.rs | 2 +- crates/mongodb-connector/src/lib.rs | 1 - crates/mongodb-connector/src/main.rs | 1 - crates/mongodb-connector/src/mongo_connector.rs | 2 +- 10 files changed, 10 insertions(+), 11 deletions(-) rename crates/{mongodb-connector => mongodb-agent-common}/src/state.rs (90%) diff --git a/Cargo.lock b/Cargo.lock index c61788dc..69e0c073 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1533,7 +1533,6 @@ dependencies = [ "configuration", "mongodb", "mongodb-agent-common", - "mongodb-connector", "ndc-sdk", "serde_json", "thiserror", diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 3389960f..0276c347 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -9,7 +9,6 @@ path = "./src/main.rs" [dependencies] configuration = { path = "../configuration" } -mongodb-connector = { path = "../mongodb-connector" } mongodb-agent-common = { path = "../mongodb-agent-common" } mongodb = "2.8" ndc-sdk = { git = "https://github.com/hasura/ndc-hub.git" } diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index f2ef85fc..915f1ade 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -3,12 +3,13 @@ //! The CLI can do a few things. This provides a central point where those things are routed and //! then done, making it easier to test this crate deterministically. + use std::path::PathBuf; use clap::Subcommand; +use configuration::Configuration; use mongodb_agent_common::{interface_types::MongoConfig, schema::get_schema}; -use mongodb_connector::api_type_conversions::v2_schema_response_to_configuration; /// The command invoked by the user. 
#[derive(Debug, Clone, Subcommand)] @@ -64,9 +65,10 @@ pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> { /// /// This expects a configuration with a valid connection URI. async fn update(context: &Context) -> anyhow::Result<()> { - let schema = get_schema(&context.mongo_config).await?; + // TODO: Get metadata directly from DB introspection instead of going via v2 get_schema() + let _schema = get_schema(&context.mongo_config).await?; - let configuration = v2_schema_response_to_configuration(schema); + let configuration = Configuration::default(); // v2_schema_response_to_configuration(schema); configuration::write_directory(&context.path, &configuration).await?; diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index b129eaf0..81a79358 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -8,7 +8,7 @@ use std::env; use std::path::PathBuf; use clap::Parser; -use mongodb_connector::state::{try_init_state_from_uri, DATABASE_URI_ENV_VAR}; +use mongodb_agent_common::state::{try_init_state_from_uri, DATABASE_URI_ENV_VAR}; use mongodb_cli_plugin::{run, Command, Context}; /// The command-line arguments. diff --git a/crates/configuration/src/read_directory.rs b/crates/configuration/src/read_directory.rs index 0af1f8fa..87f8a100 100644 --- a/crates/configuration/src/read_directory.rs +++ b/crates/configuration/src/read_directory.rs @@ -8,7 +8,7 @@ use tokio::fs; use crate::Configuration; -pub const CONFIGURATION_FILENAME: &str = "configuration"; +pub const CONFIGURATION_FILENAME: &str = "metadata"; pub const CONFIGURATION_EXTENSIONS: [(&str, FileFormat); 3] = [("json", JSON), ("yaml", YAML), ("yml", YAML)]; pub const DEFAULT_EXTENSION: &str = "json"; diff --git a/crates/mongodb-agent-common/src/lib.rs b/crates/mongodb-agent-common/src/lib.rs index 57972fab..ab1585eb 100644 --- a/crates/mongodb-agent-common/src/lib.rs +++ b/crates/mongodb-agent-common/src/lib.rs @@ -8,3 +8,4 @@ pub mod mongodb_connection; pub mod query; pub mod scalar_types_capabilities; pub mod schema; +pub mod state; diff --git a/crates/mongodb-connector/src/state.rs b/crates/mongodb-agent-common/src/state.rs similarity index 90% rename from crates/mongodb-connector/src/state.rs rename to crates/mongodb-agent-common/src/state.rs index faca22c6..4ace391b 100644 --- a/crates/mongodb-connector/src/state.rs +++ b/crates/mongodb-agent-common/src/state.rs @@ -1,7 +1,7 @@ use std::{env, error::Error}; use anyhow::anyhow; -use mongodb_agent_common::{interface_types::MongoConfig, mongodb_connection::get_mongodb_client}; +use crate::{interface_types::MongoConfig, mongodb_connection::get_mongodb_client}; pub const DATABASE_URI_ENV_VAR: &str = "MONGODB_DATABASE_URI"; diff --git a/crates/mongodb-connector/src/lib.rs b/crates/mongodb-connector/src/lib.rs index dd960b3b..a9dc4eda 100644 --- a/crates/mongodb-connector/src/lib.rs +++ b/crates/mongodb-connector/src/lib.rs @@ -3,4 +3,3 @@ pub mod capabilities; pub mod error_mapping; pub mod mongo_connector; pub mod schema; -pub mod state; diff --git a/crates/mongodb-connector/src/main.rs b/crates/mongodb-connector/src/main.rs index d38f7ce1..26c46d0b 100644 --- a/crates/mongodb-connector/src/main.rs +++ b/crates/mongodb-connector/src/main.rs @@ -3,7 +3,6 @@ mod capabilities; mod error_mapping; mod mongo_connector; mod schema; -mod state; use std::error::Error; diff --git a/crates/mongodb-connector/src/mongo_connector.rs b/crates/mongodb-connector/src/mongo_connector.rs index 6a15e319..9edf4709 100644 --- 
a/crates/mongodb-connector/src/mongo_connector.rs +++ b/crates/mongodb-connector/src/mongo_connector.rs @@ -48,7 +48,7 @@ impl Connector for MongoConnector { _configuration: &Self::Configuration, _metrics: &mut prometheus::Registry, ) -> Result { - let state = crate::state::try_init_state().await?; + let state = mongodb_agent_common::state::try_init_state().await?; Ok(state) } From 632a40f1e303a8c8c58a44c427236a77da8615f7 Mon Sep 17 00:00:00 2001 From: David Overton Date: Thu, 14 Mar 2024 14:05:18 +1100 Subject: [PATCH 06/12] Remove ndc-sdk dependency from cli --- Cargo.lock | 1 - crates/cli/Cargo.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 69e0c073..098d5494 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1533,7 +1533,6 @@ dependencies = [ "configuration", "mongodb", "mongodb-agent-common", - "ndc-sdk", "serde_json", "thiserror", "tokio", diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 0276c347..28a75c79 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -11,7 +11,6 @@ path = "./src/main.rs" configuration = { path = "../configuration" } mongodb-agent-common = { path = "../mongodb-agent-common" } mongodb = "2.8" -ndc-sdk = { git = "https://github.com/hasura/ndc-hub.git" } anyhow = "1.0.80" clap = { version = "4.5.1", features = ["derive", "env"] } From 80c19d71c560a1a62e21d8a3bddbac416b5e5921 Mon Sep 17 00:00:00 2001 From: David Overton Date: Thu, 14 Mar 2024 17:44:52 +1100 Subject: [PATCH 07/12] Generate metadata directly instead of going via v2 schema response --- crates/cli/src/introspection.rs | 563 ++++++++++++++++++++++++++++++++ 1 file changed, 563 insertions(+) create mode 100644 crates/cli/src/introspection.rs diff --git a/crates/cli/src/introspection.rs b/crates/cli/src/introspection.rs new file mode 100644 index 00000000..1e53a834 --- /dev/null +++ b/crates/cli/src/introspection.rs @@ -0,0 +1,563 @@ +use configuration::{ + metadata::{Collection, ObjectField, ObjectType, Type}, + Metadata, +}; +use futures_util::{StreamExt, TryStreamExt}; +use indexmap::IndexMap; +use mongodb::bson::from_bson; +use mongodb::results::CollectionType; +use mongodb_support::{BsonScalarType, BsonType}; +use serde::Deserialize; + +use mongodb_agent_common::{ + interface_types::{MongoAgentError, MongoConfig}, + query::collection_name, +}; + +pub async fn get_metadata_from_validation_schema( + config: &MongoConfig, +) -> Result { + let db = config.client.database(&config.database); + let collections_cursor = db.list_collections(None, None).await?; + + let (object_types, collections) = collections_cursor + .into_stream() + .map( + |collection_spec| -> Result<(Vec, Collection), MongoAgentError> { + let collection_spec_value = collection_spec?; + let name = &collection_spec_value.name; + let collection_type = &collection_spec_value.collection_type; + let schema_bson_option = collection_spec_value + .options + .validator + .as_ref() + .and_then(|x| x.get("$jsonSchema")); + + let table_info = match schema_bson_option { + Some(schema_bson) => { + from_bson::(schema_bson.clone()).map_err(|err| { + MongoAgentError::BadCollectionSchema( + name.to_owned(), + schema_bson.clone(), + err, + ) + }) + } + None => Ok(ValidatorSchema { + bson_type: BsonType::Object, + description: None, + required: Vec::new(), + properties: IndexMap::new(), + }), + } + .map(|validator_schema| make_collection(name, collection_type, &validator_schema)); + table_info + }, + ) + .try_collect::<(Vec>, Vec)>() + .await?; + + Ok(Metadata { + collections, + object_types: 
object_types.concat(), + }) +} + +fn make_collection( + collection_name: &str, + collection_type: &CollectionType, + validator_schema: &ValidatorSchema, +) -> (Vec, Collection) { + let properties = &validator_schema.properties; + let required_labels = &validator_schema.required; + + let (mut object_type_defs, object_fields) = { + let type_prefix = format!("{collection_name}_"); + let id_field = ObjectField { + name: "_id".to_string(), + description: Some("primary key _id".to_string()), + r#type: Type::Scalar(BsonScalarType::ObjectId), + }; + let (object_type_defs, mut object_fields): (Vec>, Vec) = + properties + .iter() + .map(|prop| make_object_field(&type_prefix, required_labels, prop)) + .unzip(); + if !object_fields.iter().any(|info| info.name == "_id") { + // There should always be an _id field, so add it unless it was already specified in + // the validator. + object_fields.push(id_field); + } + (object_type_defs.concat(), object_fields) + }; + + let collection_type = ObjectType { + name: collection_name.to_string(), + description: Some(format!("Object type for collection {collection_name}")), + fields: object_fields, + }; + + object_type_defs.push(collection_type); + + let collection_info = Collection { + name: collection_name.to_string(), + description: validator_schema.description.clone(), + r#type: collection_name.to_string(), + }; + + (object_type_defs, collection_info) +} + +fn make_object_field( + type_prefix: &str, + required_labels: &[String], + (prop_name, prop_schema): (&String, &Property), +) -> (Vec, ObjectField) { + let description = get_property_description(prop_schema); + + let object_type_name = format!("{type_prefix}{prop_name}"); + let (collected_otds, field_type) = make_field_type(&object_type_name, prop_schema); + + let object_field = ObjectField { + name: prop_name.clone(), + description: description, + r#type: maybe_nullable(field_type, !required_labels.contains(prop_name)), + }; + + (collected_otds, object_field) +} + +fn maybe_nullable( + t: configuration::metadata::Type, + is_nullable: bool, +) -> configuration::metadata::Type { + if is_nullable { + configuration::metadata::Type::Nullable(Box::new(t)) + } else { + t + } +} + +fn make_field_type(object_type_name: &str, prop_schema: &Property) -> (Vec, Type) { + let mut collected_otds: Vec = vec![]; + + match prop_schema { + Property::Object { + bson_type: _, + description: _, + required, + properties, + } => { + let type_prefix = format!("{object_type_name}_"); + let (otds, otd_fields): (Vec>, Vec) = properties + .iter() + .map(|prop| make_object_field(&type_prefix, required, prop)) + .unzip(); + + let object_type_definition = ObjectType { + name: object_type_name.to_string(), + description: Some("generated from MongoDB validation schema".to_string()), + fields: otd_fields, + }; + + collected_otds.append(&mut otds.concat()); + collected_otds.push(object_type_definition); + + (collected_otds, Type::Object(object_type_name.to_string())) + } + Property::Array { + bson_type: _, + description: _, + items, + } => { + let item_schemas = *items.clone(); + + let (mut otds, element_type) = make_field_type(object_type_name, &item_schemas); + let column_type = Type::ArrayOf(Box::new(element_type)); + + collected_otds.append(&mut otds); + + (collected_otds, column_type) + } + Property::Scalar { + bson_type, + description: _, + } => (collected_otds, Type::Scalar(bson_type.to_owned())), + } +} + +#[derive(Debug, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +struct ValidatorSchema { + #[serde(rename = "bsonType", 
alias = "type", default = "default_bson_type")] + #[allow(dead_code)] + bson_type: BsonType, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + #[serde(default)] + required: Vec, + #[serde(default)] + properties: IndexMap, +} + +#[derive(Clone, Debug, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +#[serde(untagged)] +enum Property { + Object { + #[serde(rename = "bsonType", default = "default_bson_type")] + #[allow(dead_code)] + bson_type: BsonType, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + #[serde(skip_serializing_if = "Vec::is_empty", default)] + required: Vec, + properties: IndexMap, + }, + Array { + #[serde(rename = "bsonType", default = "default_bson_type")] + #[allow(dead_code)] + bson_type: BsonType, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + items: Box, + }, + Scalar { + #[serde(rename = "bsonType", default = "default_bson_scalar_type")] + bson_type: BsonScalarType, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + }, +} + +fn get_property_description(p: &Property) -> Option { + match p { + Property::Object { + bson_type: _, + description, + required: _, + properties: _, + } => description.clone(), + Property::Array { + bson_type: _, + description, + items: _, + } => description.clone(), + Property::Scalar { + bson_type: _, + description, + } => description.clone(), + } +} + +fn default_bson_scalar_type() -> BsonScalarType { + BsonScalarType::Undefined +} + +fn default_bson_type() -> BsonType { + BsonType::Scalar(default_bson_scalar_type()) +} + +#[cfg(test)] +mod test { + use indexmap::IndexMap; + use mongodb::bson::{bson, from_bson}; + + use mongodb_support::{BsonScalarType, BsonType}; + + use super::{Property, ValidatorSchema}; + + #[test] + fn parses_scalar_property() -> Result<(), anyhow::Error> { + let input = bson!({ + "bsonType": "string", + "description": "'title' must be a string and is required" + }); + + assert_eq!( + from_bson::(input)?, + Property::Scalar { + bson_type: BsonScalarType::String, + description: Some("'title' must be a string and is required".to_owned()) + } + ); + + Ok(()) + } + + #[test] + fn parses_object_property() -> Result<(), anyhow::Error> { + let input = bson!({ + "bsonType": "object", + "description": "Name of places", + "required": [ "name", "description" ], + "properties": { + "name": { + "bsonType": "string", + "description": "'name' must be a string and is required" + }, + "description": { + "bsonType": "string", + "description": "'description' must be a string and is required" + } + } + }); + + assert_eq!( + from_bson::(input)?, + Property::Object { + bson_type: BsonType::Object, + description: Some("Name of places".to_owned()), + required: vec!["name".to_owned(), "description".to_owned()], + properties: IndexMap::from([ + ( + "name".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::String, + description: Some("'name' must be a string and is required".to_owned()) + } + ), + ( + "description".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::String, + description: Some( + "'description' must be a string and is required".to_owned() + ) + } + ) + ]) + } + ); + + Ok(()) + } + + #[test] + fn parses_array_property() -> Result<(), anyhow::Error> { + let input = bson!({ + "bsonType": "array", + "description": "Location must be an array of objects", + "uniqueItems": true, + "items": { + "bsonType": "object", + "required": [ "name", "size" ], + "properties": { "name": { "bsonType": "string" }, "size": 
{ "bsonType": "number" } } + } + }); + + assert_eq!( + from_bson::(input)?, + Property::Array { + bson_type: BsonType::Array, + description: Some("Location must be an array of objects".to_owned()), + items: Box::new(Property::Object { + bson_type: BsonType::Object, + description: None, + required: vec!["name".to_owned(), "size".to_owned()], + properties: IndexMap::from([ + ( + "name".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::String, + description: None + } + ), + ( + "size".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::Double, + description: None + } + ) + ]) + }), + } + ); + + Ok(()) + } + + #[test] + fn parses_validator_with_alias_field_name() -> Result<(), anyhow::Error> { + let input = bson!({ + "bsonType": "object", + "properties": { + "count": { + "bsonType": "number", + }, + }, + "required": ["count"], + }); + + assert_eq!( + from_bson::(input)?, + ValidatorSchema { + bson_type: BsonType::Object, + description: None, + required: vec!["count".to_owned()], + properties: IndexMap::from([( + "count".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::Double, + description: None, + } + )]) + } + ); + Ok(()) + } + + #[test] + fn parses_validator_property_as_object() -> Result<(), anyhow::Error> { + let input = bson!({ + "bsonType": "object", + "properties": { + "counts": { + "bsonType": "object", + "properties": { + "xs": { "bsonType": "number" }, + "os": { "bsonType": "number" }, + }, + "required": ["xs"], + }, + }, + "required": ["counts"], + }); + + assert_eq!( + from_bson::(input)?, + ValidatorSchema { + bson_type: BsonType::Object, + description: None, + required: vec!["counts".to_owned()], + properties: IndexMap::from([( + "counts".to_owned(), + Property::Object { + bson_type: BsonType::Object, + description: None, + required: vec!["xs".to_owned()], + properties: IndexMap::from([ + ( + "xs".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::Double, + description: None + } + ), + ( + "os".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::Double, + description: None + } + ), + ]) + } + )]) + } + ); + Ok(()) + } + + /// This validator is from a test collection that the frontend team uses. 
+ /// https://github.com/hasura/graphql-engine-mono/blob/main/frontend/docker/DataSources/mongo/init.js + #[test] + fn parses_frontend_team_test_validator_students() -> Result<(), anyhow::Error> { + let input = bson!({ + "bsonType": "object", + "title": "Student Object Validation", + "required": ["address", "gpa", "name", "year"], + "properties": { + "name": { + "bsonType": "string", + "description": "\"name\" must be a string and is required" + }, + "year": { + "bsonType": "int", + "minimum": 2017, + "maximum": 3017, + "description": "\"year\" must be an integer in [ 2017, 3017 ] and is required" + }, + "gpa": { + "bsonType": ["double"], + "description": "\"gpa\" must be a double if the field exists" + }, + "address": { + "bsonType": ["object"], + "properties": { + "city": { "bsonType": "string" }, + "street": { "bsonType": "string" } + }, + }, + }, + } + ); + assert_eq!( + from_bson::(input)?, + ValidatorSchema { + bson_type: BsonType::Object, + description: None, + required: ["address", "gpa", "name", "year"] + .into_iter() + .map(|s| s.to_owned()) + .collect(), + properties: IndexMap::from([ + ( + "name".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::String, + description: Some( + "\"name\" must be a string and is required".to_owned() + ), + } + ), + ( + "year".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::Int, + description: Some( + "\"year\" must be an integer in [ 2017, 3017 ] and is required" + .to_owned() + ), + } + ), + ( + "gpa".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::Double, + description: Some( + "\"gpa\" must be a double if the field exists".to_owned() + ), + } + ), + ( + "address".to_owned(), + Property::Object { + bson_type: BsonType::Object, + description: None, + required: vec![], + properties: IndexMap::from([ + ( + "city".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::String, + description: None, + } + ), + ( + "street".to_owned(), + Property::Scalar { + bson_type: BsonScalarType::String, + description: None, + } + ) + ]) + } + ) + ]), + } + ); + Ok(()) + } +} From 5d25128f99c1493bd6cfdc39715b248de7fd0955 Mon Sep 17 00:00:00 2001 From: David Overton Date: Thu, 14 Mar 2024 17:46:36 +1100 Subject: [PATCH 08/12] REmove configuration type conversions --- Cargo.lock | 4 + crates/cli/Cargo.toml | 4 + crates/cli/src/lib.rs | 8 +- .../src/api_type_conversions/configuration.rs | 112 ------------------ .../src/api_type_conversions/mod.rs | 1 - 5 files changed, 12 insertions(+), 117 deletions(-) delete mode 100644 crates/mongodb-connector/src/api_type_conversions/configuration.rs diff --git a/Cargo.lock b/Cargo.lock index 098d5494..6cb0c498 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1531,8 +1531,12 @@ dependencies = [ "anyhow", "clap", "configuration", + "futures-util", + "indexmap 1.9.3", "mongodb", "mongodb-agent-common", + "mongodb-support", + "serde", "serde_json", "thiserror", "tokio", diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 28a75c79..a4564c46 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -11,9 +11,13 @@ path = "./src/main.rs" configuration = { path = "../configuration" } mongodb-agent-common = { path = "../mongodb-agent-common" } mongodb = "2.8" +mongodb-support = { path = "../mongodb-support" } anyhow = "1.0.80" clap = { version = "4.5.1", features = ["derive", "env"] } +futures-util = "0.3.28" +indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses +serde = { version = "1.0", features = ["derive"] } serde_json = { 
version = "1.0.113", features = ["raw_value"] } thiserror = "1.0.57" tokio = { version = "1.36.0", features = ["full"] } diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 915f1ade..161768c6 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -4,6 +4,8 @@ //! then done, making it easier to test this crate deterministically. +mod introspection; + use std::path::PathBuf; use clap::Subcommand; @@ -65,10 +67,8 @@ pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> { /// /// This expects a configuration with a valid connection URI. async fn update(context: &Context) -> anyhow::Result<()> { - // TODO: Get metadata directly from DB introspection instead of going via v2 get_schema() - let _schema = get_schema(&context.mongo_config).await?; - - let configuration = Configuration::default(); // v2_schema_response_to_configuration(schema); + let metadata = introspection::get_metadata_from_validation_schema(&context.mongo_config).await?; + let configuration = Configuration { metadata }; configuration::write_directory(&context.path, &configuration).await?; diff --git a/crates/mongodb-connector/src/api_type_conversions/configuration.rs b/crates/mongodb-connector/src/api_type_conversions/configuration.rs deleted file mode 100644 index 80bf46a1..00000000 --- a/crates/mongodb-connector/src/api_type_conversions/configuration.rs +++ /dev/null @@ -1,112 +0,0 @@ -use configuration::{ - metadata::{Collection, ObjectField, ObjectType}, - Configuration, -}; - -pub fn v2_schema_response_to_configuration( - response: dc_api_types::SchemaResponse, -) -> Configuration { - let metadata = v2_schema_response_to_metadata(response); - Configuration { metadata } -} - -fn v2_schema_response_to_metadata( - response: dc_api_types::SchemaResponse, -) -> configuration::Metadata { - let table_object_types = response.tables.iter().map(table_to_object_type); - let nested_object_types = response.object_types.into_iter().map(|ot| ObjectType { - name: ot.name.to_string(), - description: ot.description, - fields: ot - .columns - .into_iter() - .map(column_info_to_object_field) - .collect(), - }); - let object_types = table_object_types.chain(nested_object_types).collect(); - - let collections = response - .tables - .into_iter() - .map(|table| table_to_collection(table)) - .collect(); - - configuration::Metadata { - collections, - object_types, - } -} - -fn column_info_to_object_field(column_info: dc_api_types::ColumnInfo) -> ObjectField { - let t = v2_to_v3_column_type(column_info.r#type); - let is_nullable = column_info.nullable; - ObjectField { - name: column_info.name, - description: column_info.description.flatten(), - r#type: maybe_nullable(t, is_nullable), - } -} - -fn maybe_nullable( - t: configuration::metadata::Type, - is_nullable: bool, -) -> configuration::metadata::Type { - if is_nullable { - configuration::metadata::Type::Nullable(Box::new(t)) - } else { - t - } -} - -fn v2_to_v3_column_type(t: dc_api_types::ColumnType) -> configuration::metadata::Type { - match t { - dc_api_types::ColumnType::Scalar(name) => { - let bson_scalar_type = mongodb_support::BsonScalarType::from_bson_name(&name).unwrap(); // XXX TODO: handle error - configuration::metadata::Type::Scalar(bson_scalar_type) - } - dc_api_types::ColumnType::Object(name) => { - configuration::metadata::Type::Object(name.to_string()) - } - dc_api_types::ColumnType::Array { - element_type, - nullable, - } => configuration::metadata::Type::ArrayOf(Box::new(maybe_nullable( - v2_to_v3_column_type(*element_type), - 
nullable, - ))), - } -} - -fn table_to_object_type(table: &dc_api_types::TableInfo) -> ObjectType { - let fields = table - .columns - .iter() - .map(|column_info| column_info_to_object_field(column_info.clone())) - .collect(); - ObjectType { - name: collection_type_name_from_table_name(table.name.clone()), - description: table.description.clone().flatten(), - fields, - } -} - -fn collection_type_name_from_table_name(table_name: Vec) -> String { - name_from_qualified_name(table_name) -} - -fn table_to_collection(table: dc_api_types::TableInfo) -> Collection { - let collection_type = collection_type_name_from_table_name(table.name.clone()); - Collection { - name: name_from_qualified_name(table.name), - description: table.description.flatten(), - r#type: collection_type, - } -} - -// TODO: handle qualified names -pub fn name_from_qualified_name(qualified_name: Vec) -> String { - qualified_name - .into_iter() - .last() - .expect("qualified name vec is not empty") -} diff --git a/crates/mongodb-connector/src/api_type_conversions/mod.rs b/crates/mongodb-connector/src/api_type_conversions/mod.rs index 934324ae..6a576036 100644 --- a/crates/mongodb-connector/src/api_type_conversions/mod.rs +++ b/crates/mongodb-connector/src/api_type_conversions/mod.rs @@ -10,7 +10,6 @@ mod query_traversal; #[allow(unused_imports)] pub use self::{ capabilities::v2_to_v3_scalar_type_capabilities, - configuration::v2_schema_response_to_configuration, conversion_error::ConversionError, json_response::map_unserialized, query_request::{v3_to_v2_query_request, QueryContext}, From f6e54380f460ddd700c285468b7bbf18582e0c5a Mon Sep 17 00:00:00 2001 From: David Overton Date: Thu, 14 Mar 2024 17:55:20 +1100 Subject: [PATCH 09/12] Use ValidatorSchema from common lib --- crates/cli/src/introspection.rs | 390 +----------------- crates/cli/src/lib.rs | 2 +- crates/mongodb-agent-common/src/schema.rs | 14 +- .../src/api_type_conversions/mod.rs | 1 - 4 files changed, 12 insertions(+), 395 deletions(-) diff --git a/crates/cli/src/introspection.rs b/crates/cli/src/introspection.rs index 1e53a834..04f87e89 100644 --- a/crates/cli/src/introspection.rs +++ b/crates/cli/src/introspection.rs @@ -5,14 +5,10 @@ use configuration::{ use futures_util::{StreamExt, TryStreamExt}; use indexmap::IndexMap; use mongodb::bson::from_bson; -use mongodb::results::CollectionType; use mongodb_support::{BsonScalarType, BsonType}; -use serde::Deserialize; +use mongodb_agent_common::schema::{get_property_description, Property, ValidatorSchema}; -use mongodb_agent_common::{ - interface_types::{MongoAgentError, MongoConfig}, - query::collection_name, -}; +use mongodb_agent_common::interface_types::{MongoAgentError, MongoConfig}; pub async fn get_metadata_from_validation_schema( config: &MongoConfig, @@ -26,7 +22,6 @@ pub async fn get_metadata_from_validation_schema( |collection_spec| -> Result<(Vec, Collection), MongoAgentError> { let collection_spec_value = collection_spec?; let name = &collection_spec_value.name; - let collection_type = &collection_spec_value.collection_type; let schema_bson_option = collection_spec_value .options .validator @@ -50,7 +45,7 @@ pub async fn get_metadata_from_validation_schema( properties: IndexMap::new(), }), } - .map(|validator_schema| make_collection(name, collection_type, &validator_schema)); + .map(|validator_schema| make_collection(name, &validator_schema)); table_info }, ) @@ -65,7 +60,6 @@ pub async fn get_metadata_from_validation_schema( fn make_collection( collection_name: &str, - collection_type: &CollectionType, 
validator_schema: &ValidatorSchema, ) -> (Vec, Collection) { let properties = &validator_schema.properties; @@ -184,380 +178,4 @@ fn make_field_type(object_type_name: &str, prop_schema: &Property) -> (Vec (collected_otds, Type::Scalar(bson_type.to_owned())), } -} - -#[derive(Debug, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -struct ValidatorSchema { - #[serde(rename = "bsonType", alias = "type", default = "default_bson_type")] - #[allow(dead_code)] - bson_type: BsonType, - #[serde(skip_serializing_if = "Option::is_none")] - description: Option, - #[serde(default)] - required: Vec, - #[serde(default)] - properties: IndexMap, -} - -#[derive(Clone, Debug, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -#[serde(untagged)] -enum Property { - Object { - #[serde(rename = "bsonType", default = "default_bson_type")] - #[allow(dead_code)] - bson_type: BsonType, - #[serde(skip_serializing_if = "Option::is_none")] - description: Option, - #[serde(skip_serializing_if = "Vec::is_empty", default)] - required: Vec, - properties: IndexMap, - }, - Array { - #[serde(rename = "bsonType", default = "default_bson_type")] - #[allow(dead_code)] - bson_type: BsonType, - #[serde(skip_serializing_if = "Option::is_none")] - description: Option, - items: Box, - }, - Scalar { - #[serde(rename = "bsonType", default = "default_bson_scalar_type")] - bson_type: BsonScalarType, - #[serde(skip_serializing_if = "Option::is_none")] - description: Option, - }, -} - -fn get_property_description(p: &Property) -> Option { - match p { - Property::Object { - bson_type: _, - description, - required: _, - properties: _, - } => description.clone(), - Property::Array { - bson_type: _, - description, - items: _, - } => description.clone(), - Property::Scalar { - bson_type: _, - description, - } => description.clone(), - } -} - -fn default_bson_scalar_type() -> BsonScalarType { - BsonScalarType::Undefined -} - -fn default_bson_type() -> BsonType { - BsonType::Scalar(default_bson_scalar_type()) -} - -#[cfg(test)] -mod test { - use indexmap::IndexMap; - use mongodb::bson::{bson, from_bson}; - - use mongodb_support::{BsonScalarType, BsonType}; - - use super::{Property, ValidatorSchema}; - - #[test] - fn parses_scalar_property() -> Result<(), anyhow::Error> { - let input = bson!({ - "bsonType": "string", - "description": "'title' must be a string and is required" - }); - - assert_eq!( - from_bson::(input)?, - Property::Scalar { - bson_type: BsonScalarType::String, - description: Some("'title' must be a string and is required".to_owned()) - } - ); - - Ok(()) - } - - #[test] - fn parses_object_property() -> Result<(), anyhow::Error> { - let input = bson!({ - "bsonType": "object", - "description": "Name of places", - "required": [ "name", "description" ], - "properties": { - "name": { - "bsonType": "string", - "description": "'name' must be a string and is required" - }, - "description": { - "bsonType": "string", - "description": "'description' must be a string and is required" - } - } - }); - - assert_eq!( - from_bson::(input)?, - Property::Object { - bson_type: BsonType::Object, - description: Some("Name of places".to_owned()), - required: vec!["name".to_owned(), "description".to_owned()], - properties: IndexMap::from([ - ( - "name".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::String, - description: Some("'name' must be a string and is required".to_owned()) - } - ), - ( - "description".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::String, - description: Some( - "'description' must be a string and 
is required".to_owned() - ) - } - ) - ]) - } - ); - - Ok(()) - } - - #[test] - fn parses_array_property() -> Result<(), anyhow::Error> { - let input = bson!({ - "bsonType": "array", - "description": "Location must be an array of objects", - "uniqueItems": true, - "items": { - "bsonType": "object", - "required": [ "name", "size" ], - "properties": { "name": { "bsonType": "string" }, "size": { "bsonType": "number" } } - } - }); - - assert_eq!( - from_bson::(input)?, - Property::Array { - bson_type: BsonType::Array, - description: Some("Location must be an array of objects".to_owned()), - items: Box::new(Property::Object { - bson_type: BsonType::Object, - description: None, - required: vec!["name".to_owned(), "size".to_owned()], - properties: IndexMap::from([ - ( - "name".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::String, - description: None - } - ), - ( - "size".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::Double, - description: None - } - ) - ]) - }), - } - ); - - Ok(()) - } - - #[test] - fn parses_validator_with_alias_field_name() -> Result<(), anyhow::Error> { - let input = bson!({ - "bsonType": "object", - "properties": { - "count": { - "bsonType": "number", - }, - }, - "required": ["count"], - }); - - assert_eq!( - from_bson::(input)?, - ValidatorSchema { - bson_type: BsonType::Object, - description: None, - required: vec!["count".to_owned()], - properties: IndexMap::from([( - "count".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::Double, - description: None, - } - )]) - } - ); - Ok(()) - } - - #[test] - fn parses_validator_property_as_object() -> Result<(), anyhow::Error> { - let input = bson!({ - "bsonType": "object", - "properties": { - "counts": { - "bsonType": "object", - "properties": { - "xs": { "bsonType": "number" }, - "os": { "bsonType": "number" }, - }, - "required": ["xs"], - }, - }, - "required": ["counts"], - }); - - assert_eq!( - from_bson::(input)?, - ValidatorSchema { - bson_type: BsonType::Object, - description: None, - required: vec!["counts".to_owned()], - properties: IndexMap::from([( - "counts".to_owned(), - Property::Object { - bson_type: BsonType::Object, - description: None, - required: vec!["xs".to_owned()], - properties: IndexMap::from([ - ( - "xs".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::Double, - description: None - } - ), - ( - "os".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::Double, - description: None - } - ), - ]) - } - )]) - } - ); - Ok(()) - } - - /// This validator is from a test collection that the frontend team uses. 
- /// https://github.com/hasura/graphql-engine-mono/blob/main/frontend/docker/DataSources/mongo/init.js - #[test] - fn parses_frontend_team_test_validator_students() -> Result<(), anyhow::Error> { - let input = bson!({ - "bsonType": "object", - "title": "Student Object Validation", - "required": ["address", "gpa", "name", "year"], - "properties": { - "name": { - "bsonType": "string", - "description": "\"name\" must be a string and is required" - }, - "year": { - "bsonType": "int", - "minimum": 2017, - "maximum": 3017, - "description": "\"year\" must be an integer in [ 2017, 3017 ] and is required" - }, - "gpa": { - "bsonType": ["double"], - "description": "\"gpa\" must be a double if the field exists" - }, - "address": { - "bsonType": ["object"], - "properties": { - "city": { "bsonType": "string" }, - "street": { "bsonType": "string" } - }, - }, - }, - } - ); - assert_eq!( - from_bson::(input)?, - ValidatorSchema { - bson_type: BsonType::Object, - description: None, - required: ["address", "gpa", "name", "year"] - .into_iter() - .map(|s| s.to_owned()) - .collect(), - properties: IndexMap::from([ - ( - "name".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::String, - description: Some( - "\"name\" must be a string and is required".to_owned() - ), - } - ), - ( - "year".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::Int, - description: Some( - "\"year\" must be an integer in [ 2017, 3017 ] and is required" - .to_owned() - ), - } - ), - ( - "gpa".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::Double, - description: Some( - "\"gpa\" must be a double if the field exists".to_owned() - ), - } - ), - ( - "address".to_owned(), - Property::Object { - bson_type: BsonType::Object, - description: None, - required: vec![], - properties: IndexMap::from([ - ( - "city".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::String, - description: None, - } - ), - ( - "street".to_owned(), - Property::Scalar { - bson_type: BsonScalarType::String, - description: None, - } - ) - ]) - } - ) - ]), - } - ); - Ok(()) - } -} +} \ No newline at end of file diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 161768c6..301bca91 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -11,7 +11,7 @@ use std::path::PathBuf; use clap::Subcommand; use configuration::Configuration; -use mongodb_agent_common::{interface_types::MongoConfig, schema::get_schema}; +use mongodb_agent_common::interface_types::MongoConfig; /// The command invoked by the user. 
#[derive(Debug, Clone, Subcommand)] diff --git a/crates/mongodb-agent-common/src/schema.rs b/crates/mongodb-agent-common/src/schema.rs index 790d691d..a1acd963 100644 --- a/crates/mongodb-agent-common/src/schema.rs +++ b/crates/mongodb-agent-common/src/schema.rs @@ -202,22 +202,22 @@ fn make_column_type( #[derive(Debug, Deserialize)] #[cfg_attr(test, derive(PartialEq))] -struct ValidatorSchema { +pub struct ValidatorSchema { #[serde(rename = "bsonType", alias = "type", default = "default_bson_type")] #[allow(dead_code)] - bson_type: BsonType, + pub bson_type: BsonType, #[serde(skip_serializing_if = "Option::is_none")] - description: Option, + pub description: Option, #[serde(default)] - required: Vec, + pub required: Vec, #[serde(default)] - properties: IndexMap, + pub properties: IndexMap, } #[derive(Clone, Debug, Deserialize)] #[cfg_attr(test, derive(PartialEq))] #[serde(untagged)] -enum Property { +pub enum Property { Object { #[serde(rename = "bsonType", default = "default_bson_type")] #[allow(dead_code)] @@ -244,7 +244,7 @@ enum Property { }, } -fn get_property_description(p: &Property) -> Option { +pub fn get_property_description(p: &Property) -> Option { match p { Property::Object { bson_type: _, diff --git a/crates/mongodb-connector/src/api_type_conversions/mod.rs b/crates/mongodb-connector/src/api_type_conversions/mod.rs index 6a576036..deb1d029 100644 --- a/crates/mongodb-connector/src/api_type_conversions/mod.rs +++ b/crates/mongodb-connector/src/api_type_conversions/mod.rs @@ -1,5 +1,4 @@ mod capabilities; -mod configuration; mod conversion_error; mod helpers; mod json_response; From 7f5337b27de179e8892f78202025297c90d7b4ce Mon Sep 17 00:00:00 2001 From: David Overton Date: Fri, 15 Mar 2024 12:10:25 +1100 Subject: [PATCH 10/12] Clippy suggestion --- crates/cli/src/introspection.rs | 15 ++++---- crates/cli/src/lib.rs | 36 ------------------- crates/cli/src/main.rs | 2 +- .../src/{read_directory.rs => directory.rs} | 0 crates/configuration/src/lib.rs | 6 ++-- 5 files changed, 11 insertions(+), 48 deletions(-) rename crates/configuration/src/{read_directory.rs => directory.rs} (100%) diff --git a/crates/cli/src/introspection.rs b/crates/cli/src/introspection.rs index 04f87e89..7a8a60d4 100644 --- a/crates/cli/src/introspection.rs +++ b/crates/cli/src/introspection.rs @@ -5,8 +5,8 @@ use configuration::{ use futures_util::{StreamExt, TryStreamExt}; use indexmap::IndexMap; use mongodb::bson::from_bson; -use mongodb_support::{BsonScalarType, BsonType}; use mongodb_agent_common::schema::{get_property_description, Property, ValidatorSchema}; +use mongodb_support::{BsonScalarType, BsonType}; use mongodb_agent_common::interface_types::{MongoAgentError, MongoConfig}; @@ -28,7 +28,7 @@ pub async fn get_metadata_from_validation_schema( .as_ref() .and_then(|x| x.get("$jsonSchema")); - let table_info = match schema_bson_option { + match schema_bson_option { Some(schema_bson) => { from_bson::(schema_bson.clone()).map_err(|err| { MongoAgentError::BadCollectionSchema( @@ -45,8 +45,7 @@ pub async fn get_metadata_from_validation_schema( properties: IndexMap::new(), }), } - .map(|validator_schema| make_collection(name, &validator_schema)); - table_info + .map(|validator_schema| make_collection(name, &validator_schema)) }, ) .try_collect::<(Vec>, Vec)>() @@ -114,7 +113,7 @@ fn make_object_field( let object_field = ObjectField { name: prop_name.clone(), - description: description, + description, r#type: maybe_nullable(field_type, !required_labels.contains(prop_name)), }; @@ -167,15 +166,15 @@ fn 
make_field_type(object_type_name: &str, prop_schema: &Property) -> (Vec (collected_otds, Type::Scalar(bson_type.to_owned())), } -} \ No newline at end of file +} diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 301bca91..40cc2697 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -1,8 +1,4 @@ //! The interpretation of the commands that the CLI can handle. -//! -//! The CLI can do a few things. This provides a central point where those things are routed and -//! then done, making it easier to test this crate deterministically. - mod introspection; @@ -16,19 +12,10 @@ use mongodb_agent_common::interface_types::MongoConfig; /// The command invoked by the user. #[derive(Debug, Clone, Subcommand)] pub enum Command { - /// Initialize a configuration in the current (empty) directory. - // Initialize, /// Update the configuration by introspecting the database, using the configuration options. Update, } -/// The set of errors that can go wrong _in addition to_ generic I/O or parsing errors. -#[derive(Debug, PartialEq, thiserror::Error)] -pub enum Error { - #[error("directory is not empty")] - DirectoryIsNotEmpty, -} - pub struct Context { pub path: PathBuf, pub mongo_config: MongoConfig, @@ -37,35 +24,12 @@ pub struct Context { /// Run a command in a given directory. pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> { match command { - // Command::Initialize => initialize(context_path)?, Command::Update => update(context).await?, }; Ok(()) } -/// Initialize an empty directory with an empty connector configuration. -/// -/// An empty configuration contains default settings and options, and is expected to be filled with -/// information such as the database connection string by the user, and later on metadata -/// information via introspection. -// fn initialize(context_path: &Path) -> anyhow::Result<()> { -// let configuration_file = context_path.join(CONFIGURATION_FILENAME); -// fs::create_dir_all(context_path)?; - -// // refuse to initialize the directory unless it is empty -// let mut items_in_dir = fs::read_dir(context_path)?; -// if items_in_dir.next().is_some() { -// Err(Error::DirectoryIsNotEmpty)?; -// } - -// let _writer = fs::File::create(configuration_file)?; -// // serde_json::to_writer_pretty(writer, &MongoConnector::make_empty_configuration())?; -// Ok(()) -// } - /// Update the configuration in the current directory by introspecting the database. -/// -/// This expects a configuration with a valid connection URI. async fn update(context: &Context) -> anyhow::Result<()> { let metadata = introspection::get_metadata_from_validation_schema(&context.mongo_config).await?; let configuration = Configuration { metadata }; diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 81a79358..8d3d40ba 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -16,7 +16,7 @@ use mongodb_cli_plugin::{run, Command, Context}; pub struct Args { /// The path to the configuration. Defaults to the current directory. 
#[arg( - long = "context", + long = "context-path", env = "HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH", value_name = "DIRECTORY" )] diff --git a/crates/configuration/src/read_directory.rs b/crates/configuration/src/directory.rs similarity index 100% rename from crates/configuration/src/read_directory.rs rename to crates/configuration/src/directory.rs diff --git a/crates/configuration/src/lib.rs b/crates/configuration/src/lib.rs index d75c6d17..b4a239ce 100644 --- a/crates/configuration/src/lib.rs +++ b/crates/configuration/src/lib.rs @@ -1,8 +1,8 @@ mod configuration; pub mod metadata; -mod read_directory; +mod directory; pub use crate::configuration::Configuration; pub use crate::metadata::Metadata; -pub use crate::read_directory::read_directory; -pub use crate::read_directory::write_directory; +pub use crate::directory::read_directory; +pub use crate::directory::write_directory; From dbafd79ead4824e6f57d7a7d40059a8b74fff724 Mon Sep 17 00:00:00 2001 From: David Overton Date: Fri, 15 Mar 2024 12:22:24 +1100 Subject: [PATCH 11/12] Use schema.json instead of metadata.json for schema file name --- crates/configuration/src/directory.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/configuration/src/directory.rs b/crates/configuration/src/directory.rs index 87f8a100..2a7739eb 100644 --- a/crates/configuration/src/directory.rs +++ b/crates/configuration/src/directory.rs @@ -8,7 +8,7 @@ use tokio::fs; use crate::Configuration; -pub const CONFIGURATION_FILENAME: &str = "metadata"; +pub const CONFIGURATION_FILENAME: &str = "schema"; pub const CONFIGURATION_EXTENSIONS: [(&str, FileFormat); 3] = [("json", JSON), ("yaml", YAML), ("yml", YAML)]; pub const DEFAULT_EXTENSION: &str = "json"; From 4b9b6ab4985cbe9fa2b24e63882ee108158cf333 Mon Sep 17 00:00:00 2001 From: David Overton Date: Fri, 15 Mar 2024 13:53:31 +1100 Subject: [PATCH 12/12] Remove unused lib.rs file --- crates/mongodb-connector/src/lib.rs | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 crates/mongodb-connector/src/lib.rs diff --git a/crates/mongodb-connector/src/lib.rs b/crates/mongodb-connector/src/lib.rs deleted file mode 100644 index a9dc4eda..00000000 --- a/crates/mongodb-connector/src/lib.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod api_type_conversions; -pub mod capabilities; -pub mod error_mapping; -pub mod mongo_connector; -pub mod schema;
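---

Note (not part of any patch in this series): for anyone reviewing the CLI crate's public surface after PATCH 10–12, the sketch below shows one way the library API could be driven directly, e.g. from a test harness, instead of through the `hasura-mongodb` binary. `Command`, `Context`, and `run` are the items `crates/cli/src/lib.rs` exports at this point in the series; `update_configuration` is a hypothetical helper, and how a `MongoConfig` is constructed is not shown in these patches, so the caller is assumed to already have one.

```rust
// Hypothetical driver for the CLI crate's library API as it stands after
// PATCH 10. Assumes the caller already has a MongoConfig; building one is
// out of scope for this series.
use std::path::PathBuf;

use mongodb_agent_common::interface_types::MongoConfig;
use mongodb_cli_plugin::{run, Command, Context};

async fn update_configuration(dir: PathBuf, mongo_config: MongoConfig) -> anyhow::Result<()> {
    let context = Context { path: dir, mongo_config };
    // `Update` introspects the database's `$jsonSchema` validators and writes
    // the resulting configuration (schema.json after PATCH 11) into `context.path`.
    run(Command::Update, &context).await
}
```

This is the same path `main.rs` takes after parsing `--context-path` (or `HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH`) and building the `Context` it passes to `run`.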