diff --git a/federation-2/router-bridge/js-src/plan.ts b/federation-2/router-bridge/js-src/plan.ts index 19581d20b..1c5346652 100644 --- a/federation-2/router-bridge/js-src/plan.ts +++ b/federation-2/router-bridge/js-src/plan.ts @@ -1,22 +1,38 @@ import { + buildSupergraphSchema, + Operation, + operationFromDocument, + Schema, +} from "@apollo/federation-internals"; +import { QueryPlan, QueryPlanner } from "@apollo/query-planner"; +import { ReferencedFieldsForType } from "apollo-reporting-protobuf"; +import { Buffer } from "buffer"; +import { + DirectiveNode, DocumentNode, ExecutionResult, + FieldNode, + FloatValueNode, + FragmentDefinitionNode, + FragmentSpreadNode, GraphQLSchema, + InlineFragmentNode, + IntValueNode, isInterfaceType, + ListValueNode, + ObjectValueNode, + OperationDefinitionNode, parse, + print, + SelectionSetNode, + separateOperations, + StringValueNode, TypeInfo, validate, + visit, visitWithTypeInfo, } from "graphql"; -import { QueryPlanner, QueryPlan } from "@apollo/query-planner"; - -import { - buildSupergraphSchema, - operationFromDocument, - Operation, - Schema, -} from "@apollo/federation-internals"; -import { ReferencedFieldsForType } from "apollo-reporting-protobuf"; +import sortBy from "lodash.sortby"; const PARSE_FAILURE: string = "## GraphQLParseFailure\n"; const VALIDATION_FAILURE: string = "## GraphQLValidationFailure\n"; @@ -67,14 +83,24 @@ export class BridgeQueryPlanner { // Federation does some validation, but not all. We need to do // all default validations that are provided by GraphQL. - const validationErrors = validate(this.apiSchema, document); - if (validationErrors.length > 0) { + try { + const validationErrors = validate(this.apiSchema, document); + if (validationErrors.length > 0) { + return { + usageReporting: { + statsReportKey: VALIDATION_FAILURE, + referencedFieldsByType: {}, + }, + errors: validationErrors, + }; + } + } catch (e) { return { usageReporting: { statsReportKey: VALIDATION_FAILURE, referencedFieldsByType: {}, }, - errors: validationErrors, + errors: [e], }; } @@ -110,7 +136,6 @@ export class BridgeQueryPlanner { // https://github.com/apollographql/apollo-server/blob/444c403011209023b5d3b5162b8fb81991046b23/packages/apollo-server-core/src/requestPipeline.ts#L303 const operationName = operation?.name; - // I double checked, this function doesn't throw const operationDerivedData = getOperationDerivedData({ schema: this.apiSchema, document, @@ -121,12 +146,24 @@ export class BridgeQueryPlanner { operationDerivedData.signature }`; + let data: QueryPlan; + try { + data = this.planner.buildQueryPlan(operation); + } catch (e) { + return { + usageReporting: { + statsReportKey: VALIDATION_FAILURE, + referencedFieldsByType: {}, + }, + errors: [e], + }; + } return { usageReporting: { statsReportKey, referencedFieldsByType: operationDerivedData.referencedFieldsByType, }, - data: this.planner.buildQueryPlan(operation), + data, }; } } @@ -244,88 +281,6 @@ export function calculateReferencedFieldsByType({ return referencedFieldsByType; } -// --------------------- - -// Copied from here -// https://github.com/apollographql/apollo-server/blob/bf2f70d74b27b0862ad5d282a153f935e0bde5cc/packages/apollo-server-core/src/plugin/usageReporting/defaultUsageReportingSignature.ts - -// In Apollo Studio, we want to group requests making the same query together, -// and treat different queries distinctly. But what does it mean for two queries -// to be "the same"? 
And what if you don't want to send the full text of the
-// query to Apollo's servers, either because it contains sensitive data or
-// because it contains extraneous operations or fragments?
-//
-// To solve these problems, ApolloServerPluginUsageReporting has the concept of
-// "signatures". We don't (by default) send the full query string of queries to
-// Apollo's servers. Instead, each trace has its query string's "signature".
-//
-// You can technically specify any function mapping a GraphQL query AST
-// (DocumentNode) to string as your signature algorithm by providing it as the
-// 'calculateSignature' option to ApolloServerPluginUsageReporting. (This option
-// is not recommended, because Apollo's servers make some assumptions about the
-// semantics of your operation based on the signature.) This file defines the
-// default function used for this purpose: defaultUsageReportingSignature
-// (formerly known as defaultEngineReportingSignature).
-//
-// This module utilizes several AST transformations from the adjacent
-// 'transforms' file. (You could use them to build your own `calculateSignature`
-// callback, but as mentioned above, you shouldn't really define that callback,
-// so they are not exported from the package.)
-
-// - dropUnusedDefinitions, which removes operations and fragments that aren't
-// going to be used in execution
-// - hideLiterals, which replaces all numeric and string literals as well as
-// list and object input values with "empty" values
-// - removeAliases, which removes field aliasing from the query
-// - sortAST, which sorts the children of most multi-child nodes consistently
-// - printWithReducedWhitespace, a variant on graphql-js's 'print' which gets
-// rid of unneeded whitespace
-//
-// defaultUsageReportingSignature consists of applying all of these building
-// blocks.
-//
-// Historical note: the default signature algorithm of the Go engineproxy
-// performed all of the above operations, and Apollo's servers then re-ran a
-// mostly identical signature implementation on received traces. This was
-// primarily to deal with edge cases where some users used literal interpolation
-// instead of GraphQL variables, included randomized alias names, etc. In
-// addition, the servers relied on the fact that dropUnusedDefinitions had been
-// called in order (and that the signature could be parsed as GraphQL) to
-// extract the name of the operation for display. This caused confusion, as the
-// query document shown in the Studio UI wasn't the same as the one actually
-// sent. ApolloServerPluginUsageReporting (previously apollo-engine-reporting)
-// uses a reporting API which requires it to explicitly include the operation
-// name with each signature; this means that the server no longer needs to parse
-// the signature or run its own signature algorithm on it, and the details of
-// the signature algorithm are now up to the reporting agent. That said, not all
-// Studio features will work properly if your signature function changes the
-// signature in unexpected ways.
-//
-// This function used to live in the `apollo-graphql` package in the
-// `apollo-tooling` repository.
-//
-// Note that this is not exactly the same algorithm as the operation registry
-// signature function, which continues to live in `apollo-graphql`.
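The removed comment above lists the building blocks behind defaultUsageReportingSignature (dropUnusedDefinitions, hideLiterals, removeAliases, sortAST, printWithReducedWhitespace). A minimal sketch of how two of those transforms could compose with graphql-js; the *Sketch helpers are illustrative stand-ins, not the real implementations from the adjacent 'transforms' file, and the real pipeline also sorts nodes, strips aliases, and reduces whitespace:

import { DocumentNode, print, separateOperations, visit } from "graphql";

// Stand-in for `hideLiterals`: blank out literal values so that queries that
// differ only in literals collapse to one signature.
function hideLiteralsSketch(ast: DocumentNode): DocumentNode {
  return visit(ast, {
    IntValue: (node) => ({ ...node, value: "0" }),
    FloatValue: (node) => ({ ...node, value: "0" }),
    StringValue: (node) => ({ ...node, value: "", block: false }),
  });
}

// Stand-in for `dropUnusedDefinitions`: keep only the definitions reachable
// from the requested operation, via graphql-js's separateOperations.
function dropUnusedDefinitionsSketch(
  ast: DocumentNode,
  operationName: string
): DocumentNode {
  return separateOperations(ast)[operationName] ?? ast;
}

// Rough shape of how the building blocks compose into a reported signature.
function signatureSketch(ast: DocumentNode, operationName: string): string {
  return print(hideLiteralsSketch(dropUnusedDefinitionsSketch(ast, operationName)));
}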
-import { - DirectiveNode, - FieldNode, - FloatValueNode, - FragmentDefinitionNode, - FragmentSpreadNode, - InlineFragmentNode, - IntValueNode, - ListValueNode, - ObjectValueNode, - OperationDefinitionNode, - print, - SelectionSetNode, - separateOperations, - StringValueNode, - visit, -} from "graphql"; -import sortBy from "lodash.sortby"; -import { Buffer } from "buffer"; - export function defaultUsageReportingSignature( ast: DocumentNode, operationName: string diff --git a/federation-2/router-bridge/js-src/plan_worker.ts b/federation-2/router-bridge/js-src/plan_worker.ts index d7032a49f..5d3960478 100644 --- a/federation-2/router-bridge/js-src/plan_worker.ts +++ b/federation-2/router-bridge/js-src/plan_worker.ts @@ -76,8 +76,9 @@ type WorkerGraphQLError = { originalError?: Error; causes?: CauseError[]; }; -const isGraphQLErrorExt = (error: any): error is GraphQLErrorExt => - error.name === "GraphQLError" || error.name === "CheckFailed"; +const isGraphQLErrorExt = (error: any): error is GraphQLErrorExt => { + return error.name === "GraphQLError" || error.name === "CheckFailed"; +}; const intoSerializableError = (error: Error): JsError => { const { name, message, stack } = error; @@ -108,7 +109,7 @@ const intoSerializableGraphQLErrorExt = ( message, locations, path, - extensions, + extensions: extensions === undefined ? {} : extensions, // extensions || {} nodes, source, positions, @@ -121,7 +122,7 @@ const intoSerializableGraphQLErrorExt = ( }; const send = async (payload: WorkerResultWithId): Promise => { - logger.debug(`plan_worker: sending payload ${JSON.stringify(payload)}`); + logger.trace(`plan_worker: sending payload ${JSON.stringify(payload)}`); await Deno.core.opAsync("send", payload); }; const receive = async (): Promise => @@ -170,6 +171,7 @@ async function run() { break; case PlannerEventKind.Plan: const planResult = planner.plan(event.query, event.operationName); + logger.error(`planResult ${JSON.stringify(planResult)}`); await send({ id, payload: planResult }); break; case PlannerEventKind.Exit: diff --git a/federation-2/router-bridge/src/planner.rs b/federation-2/router-bridge/src/planner.rs index 62690283c..34cb5c151 100644 --- a/federation-2/router-bridge/src/planner.rs +++ b/federation-2/router-bridge/src/planner.rs @@ -50,7 +50,7 @@ pub struct OperationalContext { /// /// [`graphql-js`]: https://npm.im/graphql /// [`GraphQLError`]: https://github.com/graphql/graphql-js/blob/3869211/src/error/GraphQLError.js#L18-L75 -#[derive(Debug, Error, Serialize, Deserialize, PartialEq)] +#[derive(Debug, Error, Serialize, Deserialize, PartialEq, Clone)] pub struct PlanError { /// A human-readable description of the error that prevented planning. pub message: Option, @@ -108,7 +108,7 @@ impl Display for PlanError { } } -#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] /// Error codes pub struct PlanErrorExtensions { /// The error code @@ -137,7 +137,7 @@ pub struct BridgeSetupResult { pub errors: Option>, } -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] /// The error location pub struct Location { /// The line number @@ -183,7 +183,7 @@ impl std::fmt::Display for PlannerError { /// WorkerError represents the non GraphQLErrors the deno worker can throw. /// We try to get as much data out of them. 
-#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] pub struct WorkerError { /// The error message pub message: Option, @@ -211,11 +211,26 @@ impl std::fmt::Display for WorkerError { } } +/// A representation of source input to GraphQL. The `name` and `locationOffset` parameters are +/// optional, but they are useful for clients who store GraphQL documents in source files. +/// For example, if the GraphQL input starts at line 40 in a file named `Foo.graphql`, it might +/// be useful for `name` to be `"Foo.graphql"` and location to be `{ line: 40, column: 1 }`. +/// The `line` and `column` properties in `locationOffset` are 1-indexed. +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +pub struct Source { + /// The identifier that caused the error + pub body: String, + /// The + pub name: Option, + /// The position of the error, relative to the source input + pub location_offset: Option, +} /// WorkerGraphQLError represents the GraphQLErrors the deno worker can throw. /// We try to get as much data out of them. /// While they mostly represent GraphQLErrors, they sometimes don't. /// See [`WorkerError`] -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkerGraphQLError { /// The error kind @@ -229,7 +244,13 @@ pub struct WorkerGraphQLError { #[serde(default)] pub locations: Vec, /// [`PlanErrorExtensions`] + #[serde(deserialize_with = "none_only_if_value_is_null_or_empty_object")] pub extensions: Option, + /// [`Source`] + pub source: Option, + /// An array of character offsets within the source GraphQL document + /// which correspond to this error. + pub positions: Option>, /// The original error thrown from a field resolver during execution. 
pub original_error: Option>, /// The reasons why the error was triggered (useful for schema checks) @@ -281,19 +302,31 @@ pub struct UsageReporting { #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] +#[serde(untagged)] /// The result of a router bridge plan_worker invocation -pub struct PlanResult { - /// The data if the query was successfully run - pub data: Option, - /// Usage reporting related data such as the - /// operation signature and referenced fields - pub usage_reporting: UsageReporting, - /// The errors if the query failed - pub errors: Option>, +pub enum PlanResult { + /// The payload if the plan_worker invocation succeeded + Success(PlanSuccess), + /// The payload if the plan_worker invocation failed + Errors(PlanErrors), } +// #[derive(Deserialize, Debug)] +// #[serde(rename_all = "camelCase")] +// /// The result of a router bridge plan_worker invocation +// pub struct PlanResult { +// /// The data if the query was successfully run +// pub data: Option, +// /// Usage reporting related data such as the +// /// operation signature and referenced fields +// pub usage_reporting: UsageReporting, +// /// The errors if the query failed +// pub errors: Option>, +// } + /// The payload if the plan_worker invocation succeeded -#[derive(Debug)] +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] pub struct PlanSuccess { /// The payload you're looking for pub data: T, @@ -303,27 +336,84 @@ pub struct PlanSuccess { } /// The payload if the plan_worker invocation failed -#[derive(Debug, Clone)] -pub struct PlanErrors { - /// The errors the plan_worker invocation failed with - pub errors: Arc>, - /// Usage reporting related data such as the - /// operation signature and referenced fields - pub usage_reporting: UsageReporting, +#[derive(Deserialize, Debug, Clone)] +#[serde(untagged)] +#[serde(rename_all = "camelCase")] +pub enum PlanErrors { + /// These errors has been catched and contain an usage reporting key + GraphQLCaught { + /// Usage reporting related data such as the + /// operation signature and referenced fields + usage_reporting: UsageReporting, + /// The errors the plan_worker invocation failed with + errors: Vec, + }, + JSCaught { + /// Usage reporting related data such as the + /// operation signature and referenced fields + usage_reporting: UsageReporting, + /// The errors the plan_worker invocation failed with + errors: Vec, + }, + /// These errors are fatal and can't be categorized with an usage reporting key + Fatal { + /// The errors the plan_worker invocation failed with + errors: Vec, + }, } +impl PlanErrors { + pub fn graphql_errors(&self) -> &[WorkerGraphQLError] { + match self { + Self::GraphQLCaught { errors, .. } => errors.as_slice(), + _ => &[], + } + } + + pub fn worker_errors(&self) -> &[WorkerError] { + match self { + Self::JSCaught { errors, .. } | Self::Fatal { errors } => errors.as_slice(), + _ => &[], + } + } + + pub fn usage_reporting(&self) -> Option<&UsageReporting> { + match self { + Self::GraphQLCaught { + usage_reporting, .. + } | Self::JSCaught { + usage_reporting, .. 
+ } => Some(usage_reporting), + _ => None, + } + } +} + +// /// The payload if the plan_worker invocation failed +// #[derive(Debug, Clone)] +// pub struct PlanErrors { +// /// The errors the plan_worker invocation failed with +// pub errors: Arc>, +// /// Usage reporting related data such as the +// /// operation signature and referenced fields +// pub usage_reporting: UsageReporting, +// } + impl std::fmt::Display for PlanErrors { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let errors = self + .graphql_errors() + .iter() + .map(|e| e.message.clone()) + .chain(self.worker_errors().iter().map(|e| { + e.message + .clone() + .unwrap_or_else(|| "UNKNOWN ERROR".to_string()) + })); + f.write_fmt(format_args!( "query validation errors: {}", - self.errors - .iter() - .map(|e| e - .message - .clone() - .unwrap_or_else(|| "UNKNWON ERROR".to_string())) - .collect::>() - .join(", ") + errors.collect::>().join(", ") )) } } @@ -334,23 +424,12 @@ where { /// Turn a BridgeResult into an actual Result pub fn into_result(self) -> Result, PlanErrors> { - let usage_reporting = self.usage_reporting; - if let Some(data) = self.data { - Ok(PlanSuccess { - data, - usage_reporting, - }) - } else { - let errors = Arc::new(self.errors.unwrap_or_else(|| { - vec![PlanError { - message: Some("an unknown error occured".to_string()), - extensions: None, - }] - })); - Err(PlanErrors { - errors, - usage_reporting, - }) + match self { + PlanResult::Success(plan_success) => Ok(plan_success), + PlanResult::Errors(plan_errors) => { + println!("plan_errors {plan_errors:#?}"); + Err(plan_errors) + } } } } @@ -586,11 +665,11 @@ mod tests { assert_eq!( "Syntax Error: Unexpected Name \"this\".", - payload.errors[0].message.as_ref().clone().unwrap() + payload.graphql_errors()[0].message ); assert_eq!( "## GraphQLParseFailure\n", - payload.usage_reporting.stats_report_key + payload.usage_reporting().unwrap().stats_report_key ); } @@ -623,11 +702,11 @@ mod tests { assert_eq!( "Cannot spread fragment \"thatUserFragment1\" within itself via \"thatUserFragment2\".", - payload.errors[0].message.as_ref().clone().unwrap() + payload.graphql_errors()[0].message ); assert_eq!( "## GraphQLValidationFailure\n", - payload.usage_reporting.stats_report_key + payload.usage_reporting().unwrap().stats_report_key ); } @@ -649,11 +728,11 @@ mod tests { assert_eq!( "Unknown operation named \"ThisOperationNameDoesntExist\"", - payload.errors[0].message.as_ref().clone().unwrap() + payload.graphql_errors()[0].message ); assert_eq!( "## GraphQLUnknownOperationName\n", - payload.usage_reporting.stats_report_key + payload.usage_reporting().unwrap().stats_report_key ); } @@ -672,11 +751,11 @@ mod tests { assert_eq!( "Must provide operation name if query contains multiple operations.", - payload.errors[0].message.as_ref().clone().unwrap() + payload.graphql_errors()[0].message ); assert_eq!( "## GraphQLUnknownOperationName\n", - payload.usage_reporting.stats_report_key + payload.usage_reporting().unwrap().stats_report_key ); } @@ -695,11 +774,11 @@ mod tests { assert_eq!( "This anonymous operation must be the only defined operation.", - payload.errors[0].message.as_ref().clone().unwrap() + payload.graphql_errors()[0].message ); assert_eq!( "## GraphQLValidationFailure\n", - payload.usage_reporting.stats_report_key + payload.usage_reporting().unwrap().stats_report_key ); } @@ -718,11 +797,11 @@ mod tests { assert_eq!( "Fragment \"thatUserFragment1\" is never used.", - payload.errors[0].message.as_ref().clone().unwrap() + 
payload.graphql_errors()[0].message ); assert_eq!( "## GraphQLValidationFailure\n", - payload.usage_reporting.stats_report_key + payload.usage_reporting().unwrap().stats_report_key ); } @@ -735,12 +814,18 @@ mod tests { // expect to every show up in Federation's query planner validation. // This one is for the NoFragmentCyclesRule in graphql/validate async fn invalid_graphql_validation_1_is_caught() { - let errors= vec![PlanError { - message: Some("Cannot spread fragment \"thatUserFragment1\" within itself via \"thatUserFragment2\".".to_string()), - extensions: None, - }]; + let errors= vec![WorkerGraphQLError { + message: "Cannot spread fragment \"thatUserFragment1\" within itself via \"thatUserFragment2\".".to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() + }]; - assert_errors( + assert_graphql_errors( errors, // These two fragments will spread themselves into a cycle, which is invalid per NoFragmentCyclesRule. "\ @@ -768,15 +853,18 @@ mod tests { // expect to every show up in Federation's query planner validation. // This one is for the ScalarLeafsRule in graphql/validate async fn invalid_graphql_validation_2_is_caught() { - let errors = vec![PlanError { - message: Some( - "Field \"id\" must not have a selection since type \"ID!\" has no subfields." - .to_string(), - ), - extensions: None, + let errors = vec![WorkerGraphQLError { + message: "Field \"id\" must not have a selection since type \"ID!\" has no subfields.".to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() }]; - assert_errors( + assert_graphql_errors( errors, // This Book resolver requires a selection set, per the schema. "{ me { id { absolutelyNotAcceptableLeaf } } }".to_string(), @@ -794,12 +882,18 @@ mod tests { // expect to every show up in Federation's query planner validation. // This one is for NoUnusedFragmentsRule in graphql/validate async fn invalid_graphql_validation_3_is_caught() { - let errors = vec![PlanError { - message: Some("Fragment \"UnusedTestFragment\" is never used.".to_string()), - extensions: None, + let errors = vec![WorkerGraphQLError{ + message: "Fragment \"UnusedTestFragment\" is never used.".to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() }]; - assert_errors( + assert_graphql_errors( errors, // This Book resolver requires a selection set, per the schema. 
"fragment UnusedTestFragment on User { id } query { me { id } }".to_string(), @@ -810,14 +904,18 @@ mod tests { #[tokio::test] async fn invalid_federation_validation_is_caught() { - let errors = vec![PlanError { - message: Some( - "Must provide operation name if query contains multiple operations.".to_string(), - ), - extensions: None, + let errors = vec![WorkerGraphQLError { + message: "Must provide operation name if query contains multiple operations.".to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() }]; - assert_errors( + assert_graphql_errors( errors, // This requires passing an operation name (because there are multiple operations) // but we have not done that! Therefore, we expect a validation error from planning. "query Operation1 { me { id } } query Operation2 { me { id } }".to_string(), @@ -835,6 +933,12 @@ mod tests { locations: vec![Location { line: 1, column: 1 }], original_error: None, causes: vec![], + source: Some(Source { + body: "Garbage".to_string(), + name: Some("GraphQL request".to_string()), + location_offset: Some(Location { line: 1, column: 1 }), + }), + positions: Some(vec![0]), } .into()]; @@ -847,24 +951,36 @@ mod tests { #[tokio::test] async fn syntactically_incorrect_query_is_caught() { - let errors = vec![PlanError { - message: Some("Syntax Error: Unexpected Name \"Garbage\".".to_string()), - extensions: None, + let errors = vec![WorkerGraphQLError { + message: "Syntax Error: Unexpected Name \"Garbage\".".to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() }]; - assert_errors(errors, "Garbage".to_string(), None).await; + assert_graphql_errors(errors, "Garbage".to_string(), None).await; } #[tokio::test] async fn query_missing_subfields() { let expected_error_message = r#"Field "reviews" of type "[Review]" must have a selection of subfields. Did you mean "reviews { ... 
}"?"#; - let errors = vec![PlanError { - message: Some(expected_error_message.to_string()), - extensions: None, + let errors = vec![WorkerGraphQLError { + message: expected_error_message.to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() }]; - assert_errors( + assert_graphql_errors( errors, // This query contains reviews, which requires subfields "query ExampleQuery { me { id reviews } }".to_string(), @@ -875,13 +991,18 @@ mod tests { #[tokio::test] async fn query_field_that_doesnt_exist() { - let expected_error_message = r#"Cannot query field "thisDoesntExist" on type "Query"."#; - let errors = vec![PlanError { - message: Some(expected_error_message.to_string()), - extensions: None, + let errors = vec![WorkerGraphQLError { + message: r#"Cannot query field "thisDoesntExist" on type "Query"."#.to_string(), + extensions:None, + name: Default::default(), + locations: Default::default(), + source: Default::default(), + positions: Default::default(), + original_error: Default::default(), + causes: Default::default() }]; - assert_errors( + assert_graphql_errors( errors, // This query contains reviews, which requires subfields "query ExampleQuery { thisDoesntExist }".to_string(), @@ -890,8 +1011,28 @@ mod tests { .await; } - async fn assert_errors( - expected_errors: Vec, + async fn assert_graphql_errors( + expected_errors: Vec, + query: String, + operation_name: Option, + ) { + let planner = Planner::::new(SCHEMA.to_string()) + .await + .unwrap(); + + let actual = planner + .plan(query, operation_name) + .await + .unwrap() + .into_result() + .unwrap_err(); + + assert_eq!(0, actual.worker_errors().len()); + assert_eq!(expected_errors, actual.graphql_errors()); + } + + async fn assert_worker_errors( + expected_errors: Vec, query: String, operation_name: Option, ) { @@ -899,9 +1040,15 @@ mod tests { .await .unwrap(); - let actual = planner.plan(query, operation_name).await.unwrap(); + let actual = planner + .plan(query, operation_name) + .await + .unwrap() + .into_result() + .unwrap_err(); - assert_eq!(expected_errors, actual.errors.unwrap()); + assert_eq!(0, actual.graphql_errors().len()); + assert_eq!(expected_errors, actual.worker_errors()); } #[tokio::test] @@ -914,15 +1061,17 @@ mod tests { .plan(QUERY.to_string(), None) .await .unwrap() - .data - .unwrap(); + .into_result() + .unwrap() + .data; let query_2_response = planner .plan(QUERY2.to_string(), None) .await .unwrap() - .data - .unwrap(); + .into_result() + .unwrap() + .data; let all_futures = stream::iter((0..1000).map(|i| { let (query, fut) = if i % 2 == 0 { @@ -938,9 +1087,9 @@ mod tests { .for_each_concurrent(None, |fut| async { let (query, plan_result) = fut.await; if query == QUERY { - assert_eq!(query_1_response, plan_result.data.unwrap()); + assert_eq!(query_1_response, plan_result.into_result().unwrap().data); } else { - assert_eq!(query_2_response, plan_result.data.unwrap()); + assert_eq!(query_2_response, plan_result.into_result().unwrap().data); } }) .await; @@ -973,6 +1122,8 @@ mod tests { locations: vec![Location { line: 4, column: 1 }] }) ], + source: None, + positions: None, }.into() ]; let actual_errors = Planner::::new(CORE_IN_V0_1.to_string()) @@ -1011,6 +1162,8 @@ mod tests { locations: vec![Location { line: 4, column: 9 }] }), ], + source: None, + positions: None, }.into() ]; let actual_errors = @@ -1039,6 +1192,8 @@ mod tests { stack: None, 
locations: vec![Location { line: 4, column: 9 }] })], + source: None, + positions: None, } .into()]; let actual_errors = @@ -1179,6 +1334,8 @@ feature https://specs.apollo.dev/something-unsupported/v0.1 is for: SECURITY but locations: vec![Location { line: 4, column: 1 }] }) ], + source: None, + positions: None, }.into(); assert_eq!(expected.to_string(), error_to_display.to_string()); @@ -1207,6 +1364,8 @@ feature https://specs.apollo.dev/unsupported-feature/v0.1 is for: EXECUTION but locations: vec![Location { line: 4, column: 9 }] }), ], + source: None, + positions: None, }.into(); assert_eq!(expected.to_string(), error_to_display.to_string()); @@ -1235,6 +1394,8 @@ feature https://specs.apollo.dev/unsupported-feature/v0.1 is for: SECURITY but i stack: None, locations: vec![Location { line: 4, column: 9 }] })], + source: None, + positions: None, } .into(); diff --git a/federation-2/router-bridge/src/worker.rs b/federation-2/router-bridge/src/worker.rs index 8d108a488..060018452 100644 --- a/federation-2/router-bridge/src/worker.rs +++ b/federation-2/router-bridge/src/worker.rs @@ -158,6 +158,7 @@ impl JsWorker { let payload = receiver.await.map_err(|e| { Error::DenoRuntime(format!("request: couldn't receive response: {:?}", e)) })?; + println!("payload {:?}", payload); serde_json::from_value(payload).map_err(|e| Error::ParameterDeserialization { message: format!("deno: couldn't deserialize response : `{:?}`", e),
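PlanResult and PlanErrors are now deserialized with #[serde(untagged)], which means serde tries the variants in declaration order and keeps the first one whose fields all match the incoming JSON. A minimal, self-contained sketch of that resolution, using simplified stand-in types (MiniUsageReporting, MiniPlanErrors) rather than the real ones above:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct MiniUsageReporting {
    stats_report_key: String,
}

// With `untagged`, no variant name appears in the JSON; the shape alone decides.
#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum MiniPlanErrors {
    // Chosen when the payload carries a usage reporting key alongside errors.
    #[serde(rename_all = "camelCase")]
    Caught {
        usage_reporting: MiniUsageReporting,
        errors: Vec<serde_json::Value>,
    },
    // Fallback for payloads that only carry errors.
    Fatal { errors: Vec<serde_json::Value> },
}

fn main() {
    let caught: MiniPlanErrors = serde_json::from_str(
        r#"{"usageReporting":{"statsReportKey":"## GraphQLValidationFailure\n"},"errors":[{"message":"boom"}]}"#,
    )
    .unwrap();
    let fatal: MiniPlanErrors = serde_json::from_str(r#"{"errors":[{"message":"boom"}]}"#).unwrap();
    println!("{caught:?}\n{fatal:?}");
}

Declaration order matters with untagged enums: if Fatal were listed first, any payload containing an errors array would match it and the usage reporting key would be silently dropped.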
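The extensions field on WorkerGraphQLError is deserialized with deserialize_with = "none_only_if_value_is_null_or_empty_object", but that helper is not part of the hunks shown here. A sketch of one way such a deserializer could be written, assuming its job is to map JSON null and {} to None; the generic function below is an illustration, not the actual implementation, and in planner.rs it would be instantiated at PlanErrorExtensions:

use serde::de::DeserializeOwned;
use serde::{Deserialize, Deserializer};

// Buffer the raw JSON value first, then decide: `null` and `{}` become `None`,
// anything else must deserialize into the target type.
fn none_only_if_value_is_null_or_empty_object_sketch<'de, T, D>(
    deserializer: D,
) -> Result<Option<T>, D::Error>
where
    T: DeserializeOwned,
    D: Deserializer<'de>,
{
    let value = serde_json::Value::deserialize(deserializer)?;
    match &value {
        serde_json::Value::Null => Ok(None),
        serde_json::Value::Object(map) if map.is_empty() => Ok(None),
        _ => serde_json::from_value::<T>(value)
            .map(Some)
            .map_err(serde::de::Error::custom),
    }
}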
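On the consuming side, into_result now yields the PlanErrors enum, so callers go through its accessors instead of a plain errors vector. A caller-side sketch: handle_plan_sketch and the println! logging are illustrative, while graphql_errors, worker_errors, usage_reporting, and the field names all come from the code above (the serde_json::Value type parameter mirrors the tests):

fn handle_plan_sketch(result: PlanResult<serde_json::Value>) {
    match result.into_result() {
        Ok(success) => {
            // `success.data` is the query plan itself; the usage reporting key
            // travels alongside it.
            println!(
                "planned ok, stats key: {}",
                success.usage_reporting.stats_report_key
            );
        }
        Err(errors) => {
            // GraphQL-level failures keep a usage reporting key; fatal worker
            // failures do not, hence the Option.
            if let Some(usage) = errors.usage_reporting() {
                println!("failed, stats key: {}", usage.stats_report_key);
            }
            for e in errors.graphql_errors() {
                println!("graphql error: {}", e.message);
            }
            for e in errors.worker_errors() {
                println!("worker error: {:?}", e.message);
            }
        }
    }
}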