recreate supergraph fixture using latest cli version #134

Merged: 23 commits, Dec 10, 2024
4 changes: 2 additions & 2 deletions arion-compose/e2e-testing.nix
@@ -20,7 +20,7 @@ in

connector = import ./services/connector.nix {
inherit pkgs;
- configuration-dir = ../fixtures/hasura/chinook/connector;
+ configuration-dir = ../fixtures/hasura/app/connector/chinook;
database-uri = "mongodb://mongodb/chinook";
port = connector-port;
service.depends_on.mongodb.condition = "service_healthy";
@@ -38,7 +38,7 @@ in
inherit pkgs;
port = engine-port;
connectors.chinook = "http://connector:${connector-port}";
- ddn-dirs = [ ../fixtures/hasura/chinook/metadata ];
+ ddn-dirs = [ ../fixtures/hasura/app/metadata ];
service.depends_on = {
auth-hook.condition = "service_started";
};
11 changes: 4 additions & 7 deletions arion-compose/integration-test-services.nix
@@ -22,7 +22,7 @@ in
{
connector = import ./services/connector.nix {
inherit pkgs otlp-endpoint;
- configuration-dir = ../fixtures/hasura/sample_mflix/connector;
+ configuration-dir = ../fixtures/hasura/app/connector/sample_mflix;
database-uri = "mongodb://mongodb/sample_mflix";
port = connector-port;
hostPort = hostPort connector-port;
@@ -33,7 +33,7 @@ in

connector-chinook = import ./services/connector.nix {
inherit pkgs otlp-endpoint;
- configuration-dir = ../fixtures/hasura/chinook/connector;
+ configuration-dir = ../fixtures/hasura/app/connector/chinook;
database-uri = "mongodb://mongodb/chinook";
port = connector-chinook-port;
hostPort = hostPort connector-chinook-port;
@@ -44,7 +44,7 @@ in

connector-test-cases = import ./services/connector.nix {
inherit pkgs otlp-endpoint;
- configuration-dir = ../fixtures/hasura/test_cases/connector;
+ configuration-dir = ../fixtures/hasura/app/connector/test_cases;
database-uri = "mongodb://mongodb/test_cases";
port = connector-test-cases-port;
hostPort = hostPort connector-test-cases-port;
@@ -75,10 +75,7 @@ in
test_cases = "http://connector-test-cases:${connector-test-cases-port}";
};
ddn-dirs = [
- ../fixtures/hasura/chinook/metadata
- ../fixtures/hasura/sample_mflix/metadata
- ../fixtures/hasura/test_cases/metadata
- ../fixtures/hasura/common/metadata
+ ../fixtures/hasura/app/metadata
];
service.depends_on = {
auth-hook.condition = "service_started";
2 changes: 1 addition & 1 deletion arion-compose/ndc-test.nix
@@ -14,7 +14,7 @@ in
# command = ["test" "--snapshots-dir" "/snapshots" "--seed" "1337_1337_1337_1337_1337_1337_13"];
# Replay and test the recorded snapshots
# command = ["replay" "--snapshots-dir" "/snapshots"];
- configuration-dir = ../fixtures/hasura/chinook/connector;
+ configuration-dir = ../fixtures/hasura/app/connector/chinook;
database-uri = "mongodb://mongodb:${mongodb-port}/chinook";
service.depends_on.mongodb.condition = "service_healthy";
# Run the container as the current user so when it writes to the snapshots directory it doesn't write as root
2 changes: 1 addition & 1 deletion arion-compose/services/connector.nix
@@ -12,7 +12,7 @@
, profile ? "dev" # Rust crate profile, usually either "dev" or "release"
, hostPort ? null
, command ? ["serve"]
- , configuration-dir ? ../../fixtures/hasura/sample_mflix/connector
+ , configuration-dir ? ../../fixtures/hasura/app/connector/sample_mflix
, database-uri ? "mongodb://mongodb/sample_mflix"
, service ? { } # additional options to customize this service configuration
, otlp-endpoint ? null
2 changes: 1 addition & 1 deletion arion-compose/services/engine.nix
@@ -6,7 +6,7 @@
# a `DataConnectorLink.definition.name` value in one of the given `ddn-dirs`
# to correctly match up configuration to connector instances.
, connectors ? { sample_mflix = "http://connector:7130"; }
- , ddn-dirs ? [ ../../fixtures/hasura/sample_mflix/metadata ]
+ , ddn-dirs ? [ ../../fixtures/hasura/app/metadata ]
, auth-webhook ? { url = "http://auth-hook:3050/validate-request"; }
, otlp-endpoint ? "http://jaeger:4317"
, service ? { } # additional options to customize this service configuration
6 changes: 5 additions & 1 deletion crates/cli/src/introspection/validation_schema.rs
@@ -37,7 +37,11 @@ pub async fn get_metadata_from_validation_schema(
if let Some(schema_bson) = schema_bson_option {
let validator_schema =
from_bson::<ValidatorSchema>(schema_bson.clone()).map_err(|err| {
- MongoAgentError::BadCollectionSchema(name.to_owned(), schema_bson.clone(), err)
+ MongoAgentError::BadCollectionSchema(Box::new((
+     name.to_owned(),
+     schema_bson.clone(),
+     err,
+ )))
})?;
let collection_schema = make_collection_schema(name, &validator_schema);
schemas.push(collection_schema);
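The call site above now passes a single Box::new((name, schema, err)) instead of three separate arguments, which implies the BadCollectionSchema variant stores its payload behind one Box; boxing a large payload keeps MongoAgentError itself small, since an enum is as large as its largest variant. The variant definition is not shown in this diff, so the sketch below is only an inferred shape: the Box<(String, Bson, bson::de::Error)> type is an assumption based on the call site, not something the PR confirms.

use bson::Bson;

// Sketch only: assumed post-change shape of the variant. `String` is the
// collection name, `Bson` is the cloned validator document, and
// `bson::de::Error` is the error reported by `from_bson` at the call site.
pub enum MongoAgentError {
    BadCollectionSchema(Box<(String, Bson, bson::de::Error)>),
    // ...other variants elided
}

Consumers would then unpack the tuple through the Box, e.g. `let (name, schema, err) = *details;` inside a match arm.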
2 changes: 1 addition & 1 deletion crates/configuration/src/with_name.rs
@@ -56,7 +56,7 @@ pub struct WithNameRef<'a, N, T> {
pub value: &'a T,
}

- impl<'a, N, T> WithNameRef<'a, N, T> {
+ impl<N, T> WithNameRef<'_, N, T> {
pub fn named<'b>(name: &'b N, value: &'b T) -> WithNameRef<'b, N, T> {
WithNameRef { name, value }
}
24 changes: 12 additions & 12 deletions crates/integration-tests/src/tests/aggregation.rs
@@ -18,10 +18,10 @@ async fn runs_aggregation_over_top_level_fields() -> anyhow::Result<()> {
) {
_count
milliseconds {
- _avg
- _max
- _min
- _sum
+ avg
+ max
+ min
+ sum
}
unitPrice {
_count
@@ -48,11 +48,11 @@ async fn aggregates_extended_json_representing_mixture_of_numeric_types() -> any
filter_input: { where: { type: { _regex: $types } } }
) {
value {
- _avg
+ avg
_count
- _max
- _min
- _sum
+ max
+ min
+ sum
_count_distinct
}
}
@@ -80,11 +80,11 @@ async fn aggregates_mixture_of_numeric_and_null_values() -> anyhow::Result<()> {
filter_input: { where: { type: { _regex: $types } } }
) {
value {
- _avg
+ avg
_count
- _max
- _min
- _sum
+ max
+ min
+ sum
_count_distinct
}
}
4 changes: 2 additions & 2 deletions crates/integration-tests/src/tests/basic.rs
@@ -77,7 +77,7 @@ async fn selects_field_names_that_require_escaping() -> anyhow::Result<()> {
graphql_query(
r#"
query {
- testCases_weirdFieldNames(limit: 1, order_by: { invalidName: Asc }) {
+ weirdFieldNames(limit: 1, order_by: { invalidName: Asc }) {
invalidName
invalidObjectName {
validName
@@ -101,7 +101,7 @@ async fn selects_nested_field_with_dollar_sign_in_name() -> anyhow::Result<()> {
graphql_query(
r#"
query {
- testCases_nestedFieldWithDollar(order_by: { configuration: Asc }) {
+ nestedFieldWithDollar(order_by: { configuration: Asc }) {
configuration {
schema
}
4 changes: 2 additions & 2 deletions crates/integration-tests/src/tests/expressions.rs
@@ -13,7 +13,7 @@ async fn evaluates_field_name_that_requires_escaping() -> anyhow::Result<()> {
graphql_query(
r#"
query {
- testCases_weirdFieldNames(where: { invalidName: { _eq: 3 } }) {
+ weirdFieldNames(where: { invalidName: { _eq: 3 } }) {
invalidName
}
}
@@ -31,7 +31,7 @@ async fn evaluates_field_name_that_requires_escaping_in_complex_expression() ->
graphql_query(
r#"
query {
- testCases_weirdFieldNames(
+ weirdFieldNames(
where: {
_and: [
{ invalidName: { _gt: 2 } },
21 changes: 1 addition & 20 deletions crates/integration-tests/src/tests/filtering.rs
@@ -51,7 +51,7 @@ async fn filters_by_comparisons_on_elements_of_array_field() -> anyhow::Result<(
graphql_query(
r#"
query {
- testCases_nestedCollection(
+ nestedCollection(
where: { staff: { name: { _eq: "Freeman" } } }
order_by: { institution: Asc }
) {
@@ … @@
Ok(())
}

- #[tokio::test]
- async fn filters_by_comparisons_on_elements_of_array_of_scalars() -> anyhow::Result<()> {
- assert_yaml_snapshot!(
- graphql_query(
- r#"
- query MyQuery {
- movies(where: { cast: { _eq: "Albert Austin" } }) {
- title
- cast
- }
- }
- "#
- )
- .run()
- .await?
- );
- Ok(())
- }

#[tokio::test]
async fn filters_by_comparisons_on_elements_of_array_of_scalars_against_variable(
) -> anyhow::Result<()> {
4 changes: 2 additions & 2 deletions crates/integration-tests/src/tests/native_mutation.rs
@@ -66,7 +66,7 @@ async fn accepts_predicate_argument() -> anyhow::Result<()> {
let mutation_resp = graphql_query(
r#"
mutation($albumId: Int!) {
- chinook_updateTrackPrices(newPrice: "11.99", where: {albumId: {_eq: $albumId}}) {
+ updateTrackPrices(newPrice: "11.99", where: {albumId: {_eq: $albumId}}) {
n
ok
}
@@ -79,7 +79,7 @@

assert_eq!(mutation_resp.errors, None);
assert_json!(mutation_resp.data, {
"chinook_updateTrackPrices": {
"updateTrackPrices": {
"ok": 1.0,
"n": validators::i64(|n| if n > &0 {
Ok(())
6 changes: 3 additions & 3 deletions crates/integration-tests/src/tests/native_query.rs
@@ -24,13 +24,13 @@ async fn runs_native_query_with_collection_representation() -> anyhow::Result<()
graphql_query(
r#"
query {
- title_word_frequencies(
+ titleWordFrequency(
where: {count: {_eq: 2}}
- order_by: {word: Asc}
+ order_by: {id: Asc}
offset: 100
limit: 25
) {
- word
+ id
count
}
}
@@ -1,18 +1,18 @@
---
source: crates/integration-tests/src/tests/aggregation.rs
expression: "graphql_query(r#\"\n query ($types: String!) {\n extendedJsonTestDataAggregate(\n filter_input: { where: { type: { _regex: $types } } }\n ) {\n value {\n _avg\n _count\n _max\n _min\n _sum\n _count_distinct\n }\n }\n extendedJsonTestData(where: { type: { _regex: $types } }) {\n type\n value\n }\n }\n \"#).variables(json!({\n \"types\": \"decimal|double|int|long\"\n })).run().await?"
expression: "graphql_query(r#\"\n query ($types: String!) {\n extendedJsonTestDataAggregate(\n filter_input: { where: { type: { _regex: $types } } }\n ) {\n value {\n avg\n _count\n max\n min\n sum\n _count_distinct\n }\n }\n extendedJsonTestData(where: { type: { _regex: $types } }) {\n type\n value\n }\n }\n \"#).variables(json!({\n \"types\": \"decimal|double|int|long\"\n})).run().await?"
---
data:
extendedJsonTestDataAggregate:
value:
- _avg:
+ avg:
$numberDecimal: "4.5"
_count: 8
- _max:
+ max:
$numberLong: "8"
- _min:
+ min:
$numberDecimal: "1"
- _sum:
+ sum:
$numberDecimal: "36"
_count_distinct: 8
extendedJsonTestData:
@@ -1,18 +1,18 @@
---
source: crates/integration-tests/src/tests/aggregation.rs
expression: "graphql_query(r#\"\n query ($types: String!) {\n extendedJsonTestDataAggregate(\n filter_input: { where: { type: { _regex: $types } } }\n ) {\n value {\n _avg\n _count\n _max\n _min\n _sum\n _count_distinct\n }\n }\n extendedJsonTestData(where: { type: { _regex: $types } }) {\n type\n value\n }\n }\n \"#).variables(json!({\n \"types\": \"double|null\"\n })).run().await?"
expression: "graphql_query(r#\"\n query ($types: String!) {\n extendedJsonTestDataAggregate(\n filter_input: { where: { type: { _regex: $types } } }\n ) {\n value {\n avg\n _count\n max\n min\n sum\n _count_distinct\n }\n }\n extendedJsonTestData(where: { type: { _regex: $types } }) {\n type\n value\n }\n }\n \"#).variables(json!({\n \"types\": \"double|null\"\n})).run().await?"
---
data:
extendedJsonTestDataAggregate:
value:
- _avg:
+ avg:
$numberDouble: "3.5"
_count: 2
- _max:
+ max:
$numberDouble: "4.0"
- _min:
+ min:
$numberDouble: "3.0"
- _sum:
+ sum:
$numberDouble: "7.0"
_count_distinct: 2
extendedJsonTestData:
@@ -1,6 +1,6 @@
---
source: crates/integration-tests/src/tests/aggregation.rs
expression: "graphql_query(r#\"\n query($albumId: Int!) {\n track(order_by: { id: Asc }, where: { albumId: { _eq: $albumId } }) {\n milliseconds\n unitPrice\n }\n trackAggregate(\n filter_input: { order_by: { id: Asc }, where: { albumId: { _eq: $albumId } } }\n ) {\n _count\n milliseconds {\n _avg\n _max\n _min\n _sum\n }\n unitPrice {\n _count\n _count_distinct\n }\n }\n }\n \"#).variables(json!({\n \"albumId\": 9\n })).run().await?"
expression: "graphql_query(r#\"\n query($albumId: Int!) {\n track(order_by: { id: Asc }, where: { albumId: { _eq: $albumId } }) {\n milliseconds\n unitPrice\n }\n trackAggregate(\n filter_input: { order_by: { id: Asc }, where: { albumId: { _eq: $albumId } } }\n ) {\n _count\n milliseconds {\n avg\n max\n min\n sum\n }\n unitPrice {\n _count\n _count_distinct\n }\n }\n }\n \"#).variables(json!({\n \"albumId\": 9\n})).run().await?"
---
data:
track:
@@ -23,10 +23,10 @@ data:
trackAggregate:
_count: 8
milliseconds:
- _avg: 333925.875
- _max: 436453
- _min: 221701
- _sum: 2671407
+ avg: 333925.875
+ max: 436453
+ min: 221701
+ sum: 2671407
unitPrice:
_count: 8
_count_distinct: 1
@@ -1,9 +1,9 @@
---
source: crates/integration-tests/src/tests/basic.rs
expression: "graphql_query(r#\"\n query {\n testCases_weirdFieldNames(limit: 1, order_by: { invalidName: Asc }) {\n invalidName\n invalidObjectName {\n validName\n }\n validObjectName {\n invalidNestedName\n }\n }\n }\n \"#).run().await?"
expression: "graphql_query(r#\"\n query {\n weirdFieldNames(limit: 1, order_by: { invalidName: Asc }) {\n invalidName\n invalidObjectName {\n validName\n }\n validObjectName {\n invalidNestedName\n }\n }\n }\n \"#).run().await?"
---
data:
- testCases_weirdFieldNames:
+ weirdFieldNames:
- invalidName: 1
invalidObjectName:
validName: 1
@@ -1,9 +1,9 @@
---
source: crates/integration-tests/src/tests/basic.rs
expression: "graphql_query(r#\"\n query {\n testCases_nestedFieldWithDollar(order_by: { configuration: Asc }) {\n configuration {\n schema\n }\n }\n }\n \"#).run().await?"
expression: "graphql_query(r#\"\n query {\n nestedFieldWithDollar(order_by: { configuration: Asc }) {\n configuration {\n schema\n }\n }\n }\n \"#).run().await?"
---
data:
- testCases_nestedFieldWithDollar:
+ nestedFieldWithDollar:
- configuration:
schema: ~
- configuration:
@@ -1,8 +1,8 @@
---
source: crates/integration-tests/src/tests/expressions.rs
expression: "graphql_query(r#\"\n query {\n testCases_weirdFieldNames(where: { invalidName: { _eq: 3 } }) {\n invalidName\n }\n }\n \"#).run().await?"
expression: "graphql_query(r#\"\n query {\n weirdFieldNames(where: { invalidName: { _eq: 3 } }) {\n invalidName\n }\n }\n \"#).run().await?"
---
data:
- testCases_weirdFieldNames:
+ weirdFieldNames:
- invalidName: 3
errors: ~
@@ -1,8 +1,8 @@
---
source: crates/integration-tests/src/tests/expressions.rs
expression: "graphql_query(r#\"\n query {\n testCases_weirdFieldNames(\n where: { \n _and: [\n { invalidName: { _gt: 2 } },\n { invalidName: { _lt: 4 } } \n ] \n }\n ) {\n invalidName\n }\n }\n \"#).run().await?"
expression: "graphql_query(r#\"\n query {\n weirdFieldNames(\n where: { \n _and: [\n { invalidName: { _gt: 2 } },\n { invalidName: { _lt: 4 } } \n ] \n }\n ) {\n invalidName\n }\n }\n \"#).run().await?"
---
data:
- testCases_weirdFieldNames:
+ weirdFieldNames:
- invalidName: 3
errors: ~