diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index 760043c78b..8a3acd5f33 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -8,7 +8,7 @@ import type { GraphQLFormattedError, } from '../error/GraphQLError.js'; -import type { GroupedFieldSet } from './collectFields.js'; +import type { DeferUsage, GroupedFieldSet } from './collectFields.js'; interface IncrementalUpdate> { pending: ReadonlyArray; @@ -786,6 +786,7 @@ export class DeferredGroupedFieldSetRecord { /** @internal */ export class DeferredFragmentRecord { path: ReadonlyArray; + deferUsage: DeferUsage; label: string | undefined; id: string | undefined; children: Set; @@ -795,9 +796,10 @@ export class DeferredFragmentRecord { pendingSent?: boolean; _pending: Set; - constructor(opts: { path: Path | undefined; label: string | undefined }) { + constructor(opts: { path: Path | undefined; deferUsage: DeferUsage }) { this.path = pathToArray(opts.path); - this.label = opts.label; + this.label = opts.deferUsage.label; + this.deferUsage = opts.deferUsage; this.children = new Set(); this.filtered = false; this.deferredGroupedFieldSetRecords = new Set(); diff --git a/src/execution/collectFields.ts b/src/execution/collectFields.ts index 1d0341b4cc..b054604500 100644 --- a/src/execution/collectFields.ts +++ b/src/execution/collectFields.ts @@ -1,7 +1,5 @@ import { AccumulatorMap } from '../jsutils/AccumulatorMap.js'; -import { getBySet } from '../jsutils/getBySet.js'; import { invariant } from '../jsutils/invariant.js'; -import { isSameSet } from '../jsutils/isSameSet.js'; import type { ObjMap } from '../jsutils/ObjMap.js'; import type { @@ -30,36 +28,14 @@ import { getDirectiveValues } from './values.js'; export interface DeferUsage { label: string | undefined; - ancestors: ReadonlyArray; + parent: DeferUsage | undefined; } -export const NON_DEFERRED_TARGET_SET: TargetSet = new Set([undefined]); +export type GroupedFieldSet = Map>; -export type Target = DeferUsage | undefined; -export type TargetSet = ReadonlySet; -export type DeferUsageSet = ReadonlySet; - -export interface FieldDetails { +export interface FieldDetail { node: FieldNode; - target: Target; -} - -export interface FieldGroup { - fields: ReadonlyArray; - targets: TargetSet; -} - -export type GroupedFieldSet = Map; - -export interface GroupedFieldSetDetails { - groupedFieldSet: GroupedFieldSet; - shouldInitiateDefer: boolean; -} - -export interface CollectFieldsResult { - groupedFieldSet: GroupedFieldSet; - newGroupedFieldSetDetails: Map; - newDeferUsages: ReadonlyArray; + deferUsage: DeferUsage | undefined; } interface CollectFieldsContext { @@ -68,9 +44,6 @@ interface CollectFieldsContext { variableValues: { [variable: string]: unknown }; operation: OperationDefinitionNode; runtimeType: GraphQLObjectType; - targetsByKey: Map>; - fieldsByTarget: Map>; - newDeferUsages: Array; visitedFragmentNames: Set; } @@ -89,25 +62,19 @@ export function collectFields( variableValues: { [variable: string]: unknown }, runtimeType: GraphQLObjectType, operation: OperationDefinitionNode, -): CollectFieldsResult { +): GroupedFieldSet { + const groupedFieldSet = new AccumulatorMap(); const context: CollectFieldsContext = { schema, fragments, variableValues, runtimeType, operation, - fieldsByTarget: new Map(), - targetsByKey: new Map(), - newDeferUsages: [], visitedFragmentNames: new Set(), }; - collectFieldsImpl(context, operation.selectionSet); - - return { - ...buildGroupedFieldSets(context.targetsByKey, 
context.fieldsByTarget), - newDeferUsages: context.newDeferUsages, - }; + collectFieldsImpl(context, operation.selectionSet, groupedFieldSet); + return groupedFieldSet; } /** @@ -127,42 +94,39 @@ export function collectSubfields( variableValues: { [variable: string]: unknown }, operation: OperationDefinitionNode, returnType: GraphQLObjectType, - fieldGroup: FieldGroup, -): CollectFieldsResult { + fieldDetails: ReadonlyArray, +): GroupedFieldSet { const context: CollectFieldsContext = { schema, fragments, variableValues, runtimeType: returnType, operation, - fieldsByTarget: new Map(), - targetsByKey: new Map(), - newDeferUsages: [], visitedFragmentNames: new Set(), }; + const subGroupedFieldSet = new AccumulatorMap(); - for (const fieldDetails of fieldGroup.fields) { - const node = fieldDetails.node; + for (const fieldDetail of fieldDetails) { + const node = fieldDetail.node; if (node.selectionSet) { - collectFieldsImpl(context, node.selectionSet, fieldDetails.target); + collectFieldsImpl( + context, + node.selectionSet, + subGroupedFieldSet, + fieldDetail.deferUsage, + ); } } - return { - ...buildGroupedFieldSets( - context.targetsByKey, - context.fieldsByTarget, - fieldGroup.targets, - ), - newDeferUsages: context.newDeferUsages, - }; + return subGroupedFieldSet; } function collectFieldsImpl( context: CollectFieldsContext, selectionSet: SelectionSetNode, - parentTarget?: Target, - newTarget?: Target, + groupedFieldSet: AccumulatorMap, + parentDeferUsage?: DeferUsage, + deferUsage?: DeferUsage, ): void { const { schema, @@ -170,9 +134,6 @@ function collectFieldsImpl( variableValues, runtimeType, operation, - targetsByKey, - fieldsByTarget, - newDeferUsages, visitedFragmentNames, } = context; @@ -182,20 +143,10 @@ function collectFieldsImpl( if (!shouldIncludeNode(variableValues, selection)) { continue; } - const key = getFieldEntryKey(selection); - const target = newTarget ?? parentTarget; - let keyTargets = targetsByKey.get(key); - if (keyTargets === undefined) { - keyTargets = new Set(); - targetsByKey.set(key, keyTargets); - } - keyTargets.add(target); - let targetFields = fieldsByTarget.get(target); - if (targetFields === undefined) { - targetFields = new AccumulatorMap(); - fieldsByTarget.set(target, targetFields); - } - targetFields.add(key, selection); + groupedFieldSet.add(getFieldEntryKey(selection), { + node: selection, + deferUsage: deferUsage ?? parentDeferUsage, + }); break; } case Kind.INLINE_FRAGMENT: { @@ -206,25 +157,19 @@ function collectFieldsImpl( continue; } - const defer = getDeferValues(operation, variableValues, selection); - - let target: Target; - if (!defer) { - target = newTarget; - } else { - const ancestors = - parentTarget === undefined - ? [parentTarget] - : [parentTarget, ...parentTarget.ancestors]; - target = { ...defer, ancestors }; - newDeferUsages.push(target); - } + const newDeferUsage = getDeferUsage( + operation, + variableValues, + selection, + parentDeferUsage, + ); collectFieldsImpl( context, selection.selectionSet, - parentTarget, - target, + groupedFieldSet, + parentDeferUsage, + newDeferUsage ?? 
deferUsage, ); break; @@ -232,12 +177,18 @@ function collectFieldsImpl( case Kind.FRAGMENT_SPREAD: { const fragName = selection.name.value; - if (!shouldIncludeNode(variableValues, selection)) { - continue; - } + const newDeferUsage = getDeferUsage( + operation, + variableValues, + selection, + parentDeferUsage, + ); - const defer = getDeferValues(operation, variableValues, selection); - if (visitedFragmentNames.has(fragName) && !defer) { + if ( + !newDeferUsage && + (visitedFragmentNames.has(fragName) || + !shouldIncludeNode(variableValues, selection)) + ) { continue; } @@ -248,21 +199,17 @@ function collectFieldsImpl( ) { continue; } - - let target: Target; - if (!defer) { + if (!newDeferUsage) { visitedFragmentNames.add(fragName); - target = newTarget; - } else { - const ancestors = - parentTarget === undefined - ? [parentTarget] - : [parentTarget, ...parentTarget.ancestors]; - target = { ...defer, ancestors }; - newDeferUsages.push(target); } - collectFieldsImpl(context, fragment.selectionSet, parentTarget, target); + collectFieldsImpl( + context, + fragment.selectionSet, + groupedFieldSet, + parentDeferUsage, + newDeferUsage ?? deferUsage, + ); break; } } @@ -274,11 +221,12 @@ function collectFieldsImpl( * deferred based on the experimental flag, defer directive present and * not disabled by the "if" argument. */ -function getDeferValues( +function getDeferUsage( operation: OperationDefinitionNode, variableValues: { [variable: string]: unknown }, node: FragmentSpreadNode | InlineFragmentNode, -): undefined | { label: string | undefined } { + parentDeferUsage: DeferUsage | undefined, +): DeferUsage | undefined { const defer = getDirectiveValues(GraphQLDeferDirective, node, variableValues); if (!defer) { @@ -296,6 +244,7 @@ function getDeferValues( return { label: typeof defer.label === 'string' ? defer.label : undefined, + parent: parentDeferUsage, }; } @@ -351,143 +300,3 @@ function doesFragmentConditionMatch( function getFieldEntryKey(node: FieldNode): string { return node.alias ? node.alias.value : node.name.value; } - -function buildGroupedFieldSets( - targetsByKey: Map>, - fieldsByTarget: Map>>, - parentTargets = NON_DEFERRED_TARGET_SET, -): { - groupedFieldSet: GroupedFieldSet; - newGroupedFieldSetDetails: Map; -} { - const { parentTargetKeys, targetSetDetailsMap } = getTargetSetDetails( - targetsByKey, - parentTargets, - ); - - const groupedFieldSet = - parentTargetKeys.size > 0 - ? 
getOrderedGroupedFieldSet( - parentTargetKeys, - parentTargets, - targetsByKey, - fieldsByTarget, - ) - : new Map(); - - const newGroupedFieldSetDetails = new Map< - DeferUsageSet, - GroupedFieldSetDetails - >(); - - for (const [maskingTargets, targetSetDetails] of targetSetDetailsMap) { - const { keys, shouldInitiateDefer } = targetSetDetails; - - const newGroupedFieldSet = getOrderedGroupedFieldSet( - keys, - maskingTargets, - targetsByKey, - fieldsByTarget, - ); - - // All TargetSets that causes new grouped field sets consist only of DeferUsages - // and have shouldInitiateDefer defined - newGroupedFieldSetDetails.set(maskingTargets as DeferUsageSet, { - groupedFieldSet: newGroupedFieldSet, - shouldInitiateDefer, - }); - } - - return { - groupedFieldSet, - newGroupedFieldSetDetails, - }; -} - -interface TargetSetDetails { - keys: Set; - shouldInitiateDefer: boolean; -} - -function getTargetSetDetails( - targetsByKey: Map>, - parentTargets: TargetSet, -): { - parentTargetKeys: ReadonlySet; - targetSetDetailsMap: Map; -} { - const parentTargetKeys = new Set(); - const targetSetDetailsMap = new Map(); - - for (const [responseKey, targets] of targetsByKey) { - const maskingTargetList: Array = []; - for (const target of targets) { - if ( - target === undefined || - target.ancestors.every((ancestor) => !targets.has(ancestor)) - ) { - maskingTargetList.push(target); - } - } - - const maskingTargets: TargetSet = new Set(maskingTargetList); - if (isSameSet(maskingTargets, parentTargets)) { - parentTargetKeys.add(responseKey); - continue; - } - - let targetSetDetails = getBySet(targetSetDetailsMap, maskingTargets); - if (targetSetDetails === undefined) { - targetSetDetails = { - keys: new Set(), - shouldInitiateDefer: maskingTargetList.some( - (deferUsage) => !parentTargets.has(deferUsage), - ), - }; - targetSetDetailsMap.set(maskingTargets, targetSetDetails); - } - targetSetDetails.keys.add(responseKey); - } - - return { - parentTargetKeys, - targetSetDetailsMap, - }; -} - -function getOrderedGroupedFieldSet( - keys: ReadonlySet, - maskingTargets: TargetSet, - targetsByKey: Map>, - fieldsByTarget: Map>>, -): GroupedFieldSet { - const groupedFieldSet = new Map< - string, - { fields: Array; targets: TargetSet } - >(); - - const firstTarget = maskingTargets.values().next().value as Target; - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const firstFields = fieldsByTarget.get(firstTarget)!; - for (const [key] of firstFields) { - if (keys.has(key)) { - let fieldGroup = groupedFieldSet.get(key); - if (fieldGroup === undefined) { - fieldGroup = { fields: [], targets: maskingTargets }; - groupedFieldSet.set(key, fieldGroup); - } - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - for (const target of targetsByKey.get(key)!) 
{ - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const fieldsForTarget = fieldsByTarget.get(target)!; - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const nodes = fieldsForTarget.get(key)!; - // the following line is an optional minor optimization - fieldsForTarget.delete(key); - fieldGroup.fields.push(...nodes.map((node) => ({ node, target }))); - } - } - } - - return groupedFieldSet; -} diff --git a/src/execution/execute.ts b/src/execution/execute.ts index a19a51a217..da697c0728 100644 --- a/src/execution/execute.ts +++ b/src/execution/execute.ts @@ -4,6 +4,7 @@ import { isAsyncIterable } from '../jsutils/isAsyncIterable.js'; import { isIterableObject } from '../jsutils/isIterableObject.js'; import { isObjectLike } from '../jsutils/isObjectLike.js'; import { isPromise } from '../jsutils/isPromise.js'; +import { isSameSet } from '../jsutils/isSameSet.js'; import type { Maybe } from '../jsutils/Maybe.js'; import { memoize3 } from '../jsutils/memoize3.js'; import type { ObjMap } from '../jsutils/ObjMap.js'; @@ -12,6 +13,8 @@ import { addPath, pathToArray } from '../jsutils/Path.js'; import { promiseForObject } from '../jsutils/promiseForObject.js'; import type { PromiseOrValue } from '../jsutils/PromiseOrValue.js'; import { promiseReduce } from '../jsutils/promiseReduce.js'; +import { setIsDisjointFrom } from '../jsutils/setIsDisjointFrom.js'; +import { setIsSubsetOf } from '../jsutils/setIsSubsetOf.js'; import { GraphQLError } from '../error/GraphQLError.js'; import { locatedError } from '../error/locatedError.js'; @@ -49,15 +52,12 @@ import { assertValidSchema } from '../type/validate.js'; import type { DeferUsage, - DeferUsageSet, - FieldGroup, + FieldDetail, GroupedFieldSet, - GroupedFieldSetDetails, } from './collectFields.js'; import { collectFields, collectSubfields as _collectSubfields, - NON_DEFERRED_TARGET_SET, } from './collectFields.js'; import type { ExecutionResult, @@ -92,7 +92,7 @@ const collectSubfields = memoize3( ( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, ) => _collectSubfields( exeContext.schema, @@ -100,7 +100,7 @@ const collectSubfields = memoize3( exeContext.variableValues, exeContext.operation, returnType, - fieldGroup, + fieldDetails, ), ); @@ -158,7 +158,7 @@ export interface ExecutionArgs { export interface StreamUsage { label: string | undefined; initialCount: number; - fieldGroup: FieldGroup; + fieldDetails: ReadonlyArray; } const UNEXPECTED_EXPERIMENTAL_DIRECTIVES = @@ -389,14 +389,8 @@ function executeOperation( exeContext: ExecutionContext, initialResultRecord: InitialResultRecord, ): PromiseOrValue> { - const { - operation, - schema, - fragments, - variableValues, - rootValue, - incrementalPublisher, - } = exeContext; + const { operation, schema, fragments, variableValues, rootValue } = + exeContext; const rootType = schema.getRootType(operation.operation); if (rootType == null) { throw new GraphQLError( @@ -405,23 +399,23 @@ function executeOperation( ); } - const { groupedFieldSet, newGroupedFieldSetDetails, newDeferUsages } = - collectFields(schema, fragments, variableValues, rootType, operation); - - const newDeferMap = addNewDeferredFragments( - incrementalPublisher, - newDeferUsages, - initialResultRecord, + const groupedFieldSet = collectFields( + schema, + fragments, + variableValues, + rootType, + operation, ); const path = undefined; - const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets( - 
incrementalPublisher, - newGroupedFieldSetDetails, - newDeferMap, - path, - ); + const { currentGroup, deferredGroupedFieldSetRecords, newDeferMap } = + getGroupedFieldSetsByTarget( + exeContext, + groupedFieldSet, + path, + initialResultRecord, + ); let result; switch (operation.operation) { @@ -431,7 +425,7 @@ function executeOperation( rootType, rootValue, path, - groupedFieldSet, + currentGroup, initialResultRecord, newDeferMap, ); @@ -442,7 +436,7 @@ function executeOperation( rootType, rootValue, path, - groupedFieldSet, + currentGroup, initialResultRecord, newDeferMap, ); @@ -455,7 +449,7 @@ function executeOperation( rootType, rootValue, path, - groupedFieldSet, + currentGroup, initialResultRecord, newDeferMap, ); @@ -466,7 +460,7 @@ function executeOperation( rootType, rootValue, path, - newDeferredGroupedFieldSetRecords, + deferredGroupedFieldSetRecords, newDeferMap, ); @@ -532,13 +526,13 @@ function executeFields( let containsPromise = false; try { - for (const [responseName, fieldGroup] of groupedFieldSet) { + for (const [responseName, fieldDetails] of groupedFieldSet) { const fieldPath = addPath(path, responseName, parentType.name); const result = executeField( exeContext, parentType, sourceValue, - fieldGroup, + fieldDetails, fieldPath, incrementalDataRecord, deferMap, @@ -572,8 +566,10 @@ function executeFields( return promiseForObject(results); } -function toNodes(fieldGroup: FieldGroup): ReadonlyArray { - return fieldGroup.fields.map((fieldDetails) => fieldDetails.node); +function toNodes( + fieldDetails: ReadonlyArray, +): ReadonlyArray { + return fieldDetails.map((fieldDetail) => fieldDetail.node); } /** @@ -586,12 +582,12 @@ function executeField( exeContext: ExecutionContext, parentType: GraphQLObjectType, source: unknown, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, path: Path, incrementalDataRecord: IncrementalDataRecord, deferMap: ReadonlyMap, ): PromiseOrValue { - const fieldName = fieldGroup.fields[0].node.name.value; + const fieldName = fieldDetails[0].node.name.value; const fieldDef = exeContext.schema.getField(parentType, fieldName); if (!fieldDef) { return; @@ -603,7 +599,7 @@ function executeField( const info = buildResolveInfo( exeContext, fieldDef, - fieldGroup, + fieldDetails, parentType, path, ); @@ -615,7 +611,7 @@ function executeField( // TODO: find a way to memoize, in case this field is within a List type. const args = getArgumentValues( fieldDef, - fieldGroup.fields[0].node, + fieldDetails[0].node, exeContext.variableValues, ); @@ -630,7 +626,7 @@ function executeField( return completePromisedValue( exeContext, returnType, - fieldGroup, + fieldDetails, info, path, result, @@ -642,7 +638,7 @@ function executeField( const completed = completeValue( exeContext, returnType, - fieldGroup, + fieldDetails, info, path, result, @@ -658,7 +654,7 @@ function executeField( rawError, exeContext, returnType, - fieldGroup, + fieldDetails, path, incrementalDataRecord, ); @@ -672,7 +668,7 @@ function executeField( rawError, exeContext, returnType, - fieldGroup, + fieldDetails, path, incrementalDataRecord, ); @@ -688,7 +684,7 @@ function executeField( export function buildResolveInfo( exeContext: ExecutionContext, fieldDef: GraphQLField, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, parentType: GraphQLObjectType, path: Path, ): GraphQLResolveInfo { @@ -696,7 +692,7 @@ export function buildResolveInfo( // information about the current execution state. 
return { fieldName: fieldDef.name, - fieldNodes: toNodes(fieldGroup), + fieldNodes: toNodes(fieldDetails), returnType: fieldDef.type, parentType, path, @@ -712,11 +708,15 @@ function handleFieldError( rawError: unknown, exeContext: ExecutionContext, returnType: GraphQLOutputType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, path: Path, incrementalDataRecord: IncrementalDataRecord, ): void { - const error = locatedError(rawError, toNodes(fieldGroup), pathToArray(path)); + const error = locatedError( + rawError, + toNodes(fieldDetails), + pathToArray(path), + ); // If the field type is non-nullable, then it is resolved without any // protection from errors, however it still properly locates the error. @@ -753,7 +753,7 @@ function handleFieldError( function completeValue( exeContext: ExecutionContext, returnType: GraphQLOutputType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, path: Path, result: unknown, @@ -771,7 +771,7 @@ function completeValue( const completed = completeValue( exeContext, returnType.ofType, - fieldGroup, + fieldDetails, info, path, result, @@ -796,7 +796,7 @@ function completeValue( return completeListValue( exeContext, returnType, - fieldGroup, + fieldDetails, info, path, result, @@ -817,7 +817,7 @@ function completeValue( return completeAbstractValue( exeContext, returnType, - fieldGroup, + fieldDetails, info, path, result, @@ -831,7 +831,7 @@ function completeValue( return completeObjectValue( exeContext, returnType, - fieldGroup, + fieldDetails, info, path, result, @@ -850,7 +850,7 @@ function completeValue( async function completePromisedValue( exeContext: ExecutionContext, returnType: GraphQLOutputType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, path: Path, result: Promise, @@ -862,7 +862,7 @@ async function completePromisedValue( let completed = completeValue( exeContext, returnType, - fieldGroup, + fieldDetails, info, path, resolved, @@ -878,7 +878,7 @@ async function completePromisedValue( rawError, exeContext, returnType, - fieldGroup, + fieldDetails, path, incrementalDataRecord, ); @@ -894,7 +894,7 @@ async function completePromisedValue( */ function getStreamUsage( exeContext: ExecutionContext, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, path: Path, ): StreamUsage | undefined { // do not stream inner lists of multi-dimensional lists @@ -905,10 +905,10 @@ function getStreamUsage( // TODO: add test for this case (a streamed list nested under a list). /* c8 ignore next 7 */ if ( - (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage !== + (fieldDetails as unknown as { _streamUsage: StreamUsage })._streamUsage !== undefined ) { - return (fieldGroup as unknown as { _streamUsage: StreamUsage }) + return (fieldDetails as unknown as { _streamUsage: StreamUsage }) ._streamUsage; } @@ -916,7 +916,7 @@ function getStreamUsage( // safe to only check the first fieldNode for the stream directive const stream = getDirectiveValues( GraphQLStreamDirective, - fieldGroup.fields[0].node, + fieldDetails[0].node, exeContext.variableValues, ); @@ -943,21 +943,20 @@ function getStreamUsage( '`@stream` directive not supported on subscription operations. 
Disable `@stream` by setting the `if` argument to `false`.', ); - const streamedFieldGroup: FieldGroup = { - fields: fieldGroup.fields.map((fieldDetails) => ({ - node: fieldDetails.node, - target: undefined, - })), - targets: NON_DEFERRED_TARGET_SET, - }; + const streamedFieldDetails: Array = fieldDetails.map( + (fieldDetail) => ({ + node: fieldDetail.node, + deferUsage: undefined, + }), + ); const streamUsage = { initialCount: stream.initialCount, label: typeof stream.label === 'string' ? stream.label : undefined, - fieldGroup: streamedFieldGroup, + fieldDetails: streamedFieldDetails, }; - (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage = + (fieldDetails as unknown as { _streamUsage: StreamUsage })._streamUsage = streamUsage; return streamUsage; @@ -969,14 +968,14 @@ function getStreamUsage( async function completeAsyncIteratorValue( exeContext: ExecutionContext, itemType: GraphQLOutputType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, path: Path, asyncIterator: AsyncIterator, incrementalDataRecord: IncrementalDataRecord, deferMap: ReadonlyMap, ): Promise> { - const streamUsage = getStreamUsage(exeContext, fieldGroup, path); + const streamUsage = getStreamUsage(exeContext, fieldDetails, path); let containsPromise = false; const completedResults: Array = []; let index = 0; @@ -997,7 +996,7 @@ async function completeAsyncIteratorValue( index, asyncIterator, exeContext, - streamUsage.fieldGroup, + streamUsage.fieldDetails, info, itemType, path, @@ -1016,7 +1015,7 @@ async function completeAsyncIteratorValue( break; } } catch (rawError) { - throw locatedError(rawError, toNodes(fieldGroup), pathToArray(path)); + throw locatedError(rawError, toNodes(fieldDetails), pathToArray(path)); } if ( @@ -1025,7 +1024,7 @@ async function completeAsyncIteratorValue( completedResults, exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, incrementalDataRecord, @@ -1046,7 +1045,7 @@ async function completeAsyncIteratorValue( function completeListValue( exeContext: ExecutionContext, returnType: GraphQLList, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, path: Path, result: unknown, @@ -1061,7 +1060,7 @@ function completeListValue( return completeAsyncIteratorValue( exeContext, itemType, - fieldGroup, + fieldDetails, info, path, asyncIterator, @@ -1076,7 +1075,7 @@ function completeListValue( ); } - const streamUsage = getStreamUsage(exeContext, fieldGroup, path); + const streamUsage = getStreamUsage(exeContext, fieldDetails, path); // This is specified as a simple map, however we're optimizing the path // where the list contains no Promises by avoiding creating another Promise. 
@@ -1099,7 +1098,7 @@ function completeListValue( itemPath, item, exeContext, - streamUsage.fieldGroup, + streamUsage.fieldDetails, info, itemType, currentParents, @@ -1115,7 +1114,7 @@ function completeListValue( completedResults, exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, incrementalDataRecord, @@ -1147,7 +1146,7 @@ function completeListItemValue( completedResults: Array, exeContext: ExecutionContext, itemType: GraphQLOutputType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, itemPath: Path, incrementalDataRecord: IncrementalDataRecord, @@ -1158,7 +1157,7 @@ function completeListItemValue( completePromisedValue( exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, item, @@ -1174,7 +1173,7 @@ function completeListItemValue( const completedItem = completeValue( exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, item, @@ -1191,7 +1190,7 @@ function completeListItemValue( rawError, exeContext, itemType, - fieldGroup, + fieldDetails, itemPath, incrementalDataRecord, ); @@ -1212,7 +1211,7 @@ function completeListItemValue( rawError, exeContext, itemType, - fieldGroup, + fieldDetails, itemPath, incrementalDataRecord, ); @@ -1248,7 +1247,7 @@ function completeLeafValue( function completeAbstractValue( exeContext: ExecutionContext, returnType: GraphQLAbstractType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, path: Path, result: unknown, @@ -1267,11 +1266,11 @@ function completeAbstractValue( resolvedRuntimeType, exeContext, returnType, - fieldGroup, + fieldDetails, info, result, ), - fieldGroup, + fieldDetails, info, path, result, @@ -1287,11 +1286,11 @@ function completeAbstractValue( runtimeType, exeContext, returnType, - fieldGroup, + fieldDetails, info, result, ), - fieldGroup, + fieldDetails, info, path, result, @@ -1304,14 +1303,14 @@ function ensureValidRuntimeType( runtimeTypeName: unknown, exeContext: ExecutionContext, returnType: GraphQLAbstractType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, result: unknown, ): GraphQLObjectType { if (runtimeTypeName == null) { throw new GraphQLError( `Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". 
Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, - { nodes: toNodes(fieldGroup) }, + { nodes: toNodes(fieldDetails) }, ); } @@ -1334,21 +1333,21 @@ function ensureValidRuntimeType( if (runtimeType == null) { throw new GraphQLError( `Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, - { nodes: toNodes(fieldGroup) }, + { nodes: toNodes(fieldDetails) }, ); } if (!isObjectType(runtimeType)) { throw new GraphQLError( `Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, - { nodes: toNodes(fieldGroup) }, + { nodes: toNodes(fieldDetails) }, ); } if (!exeContext.schema.isSubType(returnType, runtimeType)) { throw new GraphQLError( `Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, - { nodes: toNodes(fieldGroup) }, + { nodes: toNodes(fieldDetails) }, ); } @@ -1361,7 +1360,7 @@ function ensureValidRuntimeType( function completeObjectValue( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, path: Path, result: unknown, @@ -1377,12 +1376,12 @@ function completeObjectValue( if (isPromise(isTypeOf)) { return isTypeOf.then((resolvedIsTypeOf) => { if (!resolvedIsTypeOf) { - throw invalidReturnTypeError(returnType, result, fieldGroup); + throw invalidReturnTypeError(returnType, result, fieldDetails); } return collectAndExecuteSubfields( exeContext, returnType, - fieldGroup, + fieldDetails, path, result, incrementalDataRecord, @@ -1392,14 +1391,14 @@ function completeObjectValue( } if (!isTypeOf) { - throw invalidReturnTypeError(returnType, result, fieldGroup); + throw invalidReturnTypeError(returnType, result, fieldDetails); } } return collectAndExecuteSubfields( exeContext, returnType, - fieldGroup, + fieldDetails, path, result, incrementalDataRecord, @@ -1410,11 +1409,11 @@ function completeObjectValue( function invalidReturnTypeError( returnType: GraphQLObjectType, result: unknown, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, ): GraphQLError { return new GraphQLError( `Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, - { nodes: toNodes(fieldGroup) }, + { nodes: toNodes(fieldDetails) }, ); } @@ -1455,8 +1454,7 @@ function addNewDeferredFragments( // For each new deferUsage object: for (const newDeferUsage of newDeferUsages) { - // DeferUsage objects track their parent targets; the immediate parent is always the first member of this list. - const parentTarget = newDeferUsage.ancestors[0]; + const parentTarget = newDeferUsage.parent; // If the parent target is defined, the parent target is a DeferUsage object and // the parent result record is the DeferredFragmentRecord corresponding to that DeferUsage. @@ -1468,20 +1466,22 @@ function addNewDeferredFragments( ? (incrementalDataRecord as InitialResultRecord | StreamItemsRecord) : deferredFragmentRecordFromDeferUsage(parentTarget, newDeferMap); - // Instantiate the new record. - const deferredFragmentRecord = new DeferredFragmentRecord({ - path, - label: newDeferUsage.label, - }); + if (!newDeferMap.has(newDeferUsage)) { + // Instantiate the new record. + const deferredFragmentRecord = new DeferredFragmentRecord({ + path, + deferUsage: newDeferUsage, + }); - // Report the new record to the Incremental Publisher. 
- incrementalPublisher.reportNewDeferFragmentRecord( - deferredFragmentRecord, - parent, - ); + // Report the new record to the Incremental Publisher. + incrementalPublisher.reportNewDeferFragmentRecord( + deferredFragmentRecord, + parent, + ); - // Update the map. - newDeferMap.set(newDeferUsage, deferredFragmentRecord); + // Update the map. + newDeferMap.set(newDeferUsage, deferredFragmentRecord); + } } return newDeferMap; @@ -1491,87 +1491,134 @@ function deferredFragmentRecordFromDeferUsage( deferUsage: DeferUsage, deferMap: ReadonlyMap, ): DeferredFragmentRecord { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - return deferMap.get(deferUsage)!; + const deferredFragmentRecord = deferMap.get(deferUsage); + invariant( + deferredFragmentRecord != null, + 'deferredFragmentRecord must be in deferMap', + ); + return deferredFragmentRecord; } -function addNewDeferredGroupedFieldSets( - incrementalPublisher: IncrementalPublisher, - newGroupedFieldSetDetails: Map, - deferMap: ReadonlyMap, - path?: Path | undefined, -): ReadonlyArray { - const newDeferredGroupedFieldSetRecords: Array = - []; - - for (const [ - newGroupedFieldSetDeferUsages, - { groupedFieldSet, shouldInitiateDefer }, - ] of newGroupedFieldSetDetails) { - const deferredFragmentRecords = getDeferredFragmentRecords( - newGroupedFieldSetDeferUsages, - deferMap, - ); +function getGroupedFieldSetsByTarget( + exeContext: ExecutionContext, + groupedFieldSet: GroupedFieldSet, + path: Path | undefined, + incrementalDataRecord: IncrementalDataRecord, + deferMap?: ReadonlyMap, +): { + currentGroup: GroupedFieldSet; + deferredGroupedFieldSetRecords: Array; + newDeferMap: ReadonlyMap; +} { + const currentGroup = new Map>(); + const newDeferUsages = new Set(); + const deferredGroups: Array<{ + deferUsageSet: Set; + shouldInitiateDefer: boolean; + groupedFieldSet: GroupedFieldSet; + }> = []; + + for (const [key, fieldDetails] of groupedFieldSet) { + const deferUsageSet = new Set(); + let everyNodeIsDeferred = true; + for (const fieldDetail of fieldDetails) { + if (fieldDetail.deferUsage != null) { + deferUsageSet.add(fieldDetail.deferUsage); + newDeferUsages.add(fieldDetail.deferUsage); + } else { + everyNodeIsDeferred = false; + } + } + const parentDeferUsageSet: Set = + 'deferredFragmentRecords' in incrementalDataRecord + ? 
new Set( + incrementalDataRecord.deferredFragmentRecords.map( + (d) => d.deferUsage, + ), + ) + : new Set(); + if ( + !everyNodeIsDeferred || + (parentDeferUsageSet.size !== 0 && + setIsSubsetOf(parentDeferUsageSet, deferUsageSet)) + ) { + currentGroup.set(key, fieldDetails); + } else { + let deferredGroup = deferredGroups.find((group) => + isSameSet(group.deferUsageSet, deferUsageSet), + ); + if (!deferredGroup) { + deferredGroup = { + deferUsageSet, + shouldInitiateDefer: setIsDisjointFrom( + deferUsageSet, + parentDeferUsageSet, + ), + groupedFieldSet: new Map>(), + }; + deferredGroups.push(deferredGroup); + } + deferredGroup.groupedFieldSet.set(key, fieldDetails); + } + } + + const deferredGroupedFieldSetRecords = []; + const newDeferMap = addNewDeferredFragments( + exeContext.incrementalPublisher, + Array.from(newDeferUsages), + incrementalDataRecord, + deferMap, + path, + ); + for (const deferredGroup of deferredGroups) { const deferredGroupedFieldSetRecord = new DeferredGroupedFieldSetRecord({ path, - deferredFragmentRecords, - groupedFieldSet, - shouldInitiateDefer, + deferredFragmentRecords: Array.from(deferredGroup.deferUsageSet).map( + (d) => deferredFragmentRecordFromDeferUsage(d, newDeferMap), + ), + shouldInitiateDefer: deferredGroup.shouldInitiateDefer, + groupedFieldSet: deferredGroup.groupedFieldSet, }); - incrementalPublisher.reportNewDeferredGroupedFieldSetRecord( + exeContext.incrementalPublisher.reportNewDeferredGroupedFieldSetRecord( deferredGroupedFieldSetRecord, ); - newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord); + deferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord); } - return newDeferredGroupedFieldSetRecords; -} - -function getDeferredFragmentRecords( - deferUsages: DeferUsageSet, - deferMap: ReadonlyMap, -): ReadonlyArray { - return Array.from(deferUsages).map((deferUsage) => - deferredFragmentRecordFromDeferUsage(deferUsage, deferMap), - ); + return { currentGroup, deferredGroupedFieldSetRecords, newDeferMap }; } function collectAndExecuteSubfields( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, path: Path, result: unknown, incrementalDataRecord: IncrementalDataRecord, deferMap: ReadonlyMap, ): PromiseOrValue> { // Collect sub-fields to execute to complete this value. 
- const { groupedFieldSet, newGroupedFieldSetDetails, newDeferUsages } = - collectSubfields(exeContext, returnType, fieldGroup); - - const incrementalPublisher = exeContext.incrementalPublisher; - - const newDeferMap = addNewDeferredFragments( - incrementalPublisher, - newDeferUsages, - incrementalDataRecord, - deferMap, - path, + const groupedFieldSet = collectSubfields( + exeContext, + returnType, + fieldDetails, ); - const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets( - incrementalPublisher, - newGroupedFieldSetDetails, - newDeferMap, - path, - ); + const { currentGroup, deferredGroupedFieldSetRecords, newDeferMap } = + getGroupedFieldSetsByTarget( + exeContext, + groupedFieldSet, + path, + incrementalDataRecord, + deferMap, + ); const subFields = executeFields( exeContext, returnType, result, path, - groupedFieldSet, + currentGroup, incrementalDataRecord, newDeferMap, ); @@ -1581,7 +1628,7 @@ function collectAndExecuteSubfields( returnType, result, path, - newDeferredGroupedFieldSetRecords, + deferredGroupedFieldSetRecords, newDeferMap, ); @@ -1801,7 +1848,7 @@ function executeSubscription( ); } - const { groupedFieldSet } = collectFields( + const groupedFieldSet = collectFields( schema, fragments, variableValues, @@ -1811,16 +1858,16 @@ function executeSubscription( const firstRootField = groupedFieldSet.entries().next().value as [ string, - FieldGroup, + ReadonlyArray, ]; - const [responseName, fieldGroup] = firstRootField; - const fieldName = fieldGroup.fields[0].node.name.value; + const [responseName, fieldDetails] = firstRootField; + const fieldName = fieldDetails[0].node.name.value; const fieldDef = schema.getField(rootType, fieldName); if (!fieldDef) { throw new GraphQLError( `The subscription field "${fieldName}" is not defined.`, - { nodes: toNodes(fieldGroup) }, + { nodes: toNodes(fieldDetails) }, ); } @@ -1828,7 +1875,7 @@ function executeSubscription( const info = buildResolveInfo( exeContext, fieldDef, - fieldGroup, + fieldDetails, rootType, path, ); @@ -1841,7 +1888,7 @@ function executeSubscription( // variables scope to fulfill any variable references. 
const args = getArgumentValues( fieldDef, - fieldGroup.fields[0].node, + fieldDetails[0].node, variableValues, ); @@ -1857,13 +1904,13 @@ function executeSubscription( if (isPromise(result)) { return result.then(assertEventStream).then(undefined, (error) => { - throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); + throw locatedError(error, toNodes(fieldDetails), pathToArray(path)); }); } return assertEventStream(result); } catch (error) { - throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); + throw locatedError(error, toNodes(fieldDetails), pathToArray(path)); } } @@ -1970,7 +2017,7 @@ function executeStreamField( itemPath: Path, item: PromiseOrValue, exeContext: ExecutionContext, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, itemType: GraphQLOutputType, incrementalDataRecord: IncrementalDataRecord, @@ -1990,7 +2037,7 @@ function executeStreamField( completePromisedValue( exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, item, @@ -2019,7 +2066,7 @@ function executeStreamField( completedItem = completeValue( exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, item, @@ -2031,7 +2078,7 @@ function executeStreamField( rawError, exeContext, itemType, - fieldGroup, + fieldDetails, itemPath, streamItemsRecord, ); @@ -2051,7 +2098,7 @@ function executeStreamField( rawError, exeContext, itemType, - fieldGroup, + fieldDetails, itemPath, streamItemsRecord, ); @@ -2084,7 +2131,7 @@ function executeStreamField( async function executeStreamAsyncIteratorItem( asyncIterator: AsyncIterator, exeContext: ExecutionContext, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, itemType: GraphQLOutputType, streamItemsRecord: StreamItemsRecord, @@ -2106,7 +2153,7 @@ async function executeStreamAsyncIteratorItem( } catch (rawError) { throw locatedError( rawError, - toNodes(fieldGroup), + toNodes(fieldDetails), streamItemsRecord.streamRecord.path, ); } @@ -2115,7 +2162,7 @@ async function executeStreamAsyncIteratorItem( completedItem = completeValue( exeContext, itemType, - fieldGroup, + fieldDetails, info, itemPath, item, @@ -2129,7 +2176,7 @@ async function executeStreamAsyncIteratorItem( rawError, exeContext, itemType, - fieldGroup, + fieldDetails, itemPath, streamItemsRecord, ); @@ -2143,7 +2190,7 @@ async function executeStreamAsyncIteratorItem( rawError, exeContext, itemType, - fieldGroup, + fieldDetails, itemPath, streamItemsRecord, ); @@ -2156,7 +2203,7 @@ async function executeStreamAsyncIterator( initialIndex: number, asyncIterator: AsyncIterator, exeContext: ExecutionContext, - fieldGroup: FieldGroup, + fieldDetails: ReadonlyArray, info: GraphQLResolveInfo, itemType: GraphQLOutputType, path: Path, @@ -2184,7 +2231,7 @@ async function executeStreamAsyncIterator( iteration = await executeStreamAsyncIteratorItem( asyncIterator, exeContext, - fieldGroup, + fieldDetails, info, itemType, streamItemsRecord, diff --git a/src/jsutils/__tests__/setIsDisjointFrom-test.ts b/src/jsutils/__tests__/setIsDisjointFrom-test.ts new file mode 100644 index 0000000000..8fe5a733b0 --- /dev/null +++ b/src/jsutils/__tests__/setIsDisjointFrom-test.ts @@ -0,0 +1,30 @@ +import { expect } from 'chai'; +import { describe, it } from 'mocha'; + +import { setIsDisjointFrom } from '../setIsDisjointFrom.js'; + +describe('setIsDisjointFrom', () => { + it('setA is smaller and disjointed', () => { + const setA = new Set(['C']); + const setB = new Set(['A', 'B']); + expect(setIsDisjointFrom(setA, 
setB)).to.equal(true);
+  });
+
+  it('setA is smaller and not disjointed', () => {
+    const setA = new Set(['B']);
+    const setB = new Set(['A', 'B']);
+    expect(setIsDisjointFrom(setA, setB)).to.equal(false);
+  });
+
+  it('setA is larger and disjointed', () => {
+    const setA = new Set(['C', 'D', 'E']);
+    const setB = new Set(['A', 'B']);
+    expect(setIsDisjointFrom(setA, setB)).to.equal(true);
+  });
+
+  it('setA is larger and not disjointed', () => {
+    const setA = new Set(['C', 'D', 'B']);
+    const setB = new Set(['A', 'B']);
+    expect(setIsDisjointFrom(setA, setB)).to.equal(false);
+  });
+});
diff --git a/src/jsutils/__tests__/setIsSubsetOf-test.ts b/src/jsutils/__tests__/setIsSubsetOf-test.ts
new file mode 100644
index 0000000000..519ecc0a5d
--- /dev/null
+++ b/src/jsutils/__tests__/setIsSubsetOf-test.ts
@@ -0,0 +1,24 @@
+import { expect } from 'chai';
+import { describe, it } from 'mocha';
+
+import { setIsSubsetOf } from '../setIsSubsetOf.js';
+
+describe('setIsSubsetOf', () => {
+  it('setA is larger', () => {
+    const setA = new Set(['A', 'B']);
+    const setB = new Set(['A']);
+    expect(setIsSubsetOf(setA, setB)).to.equal(false);
+  });
+
+  it('setA is smaller and a subset', () => {
+    const setA = new Set(['A']);
+    const setB = new Set(['A', 'B']);
+    expect(setIsSubsetOf(setA, setB)).to.equal(true);
+  });
+
+  it('setA is smaller and not a subset', () => {
+    const setA = new Set(['C']);
+    const setB = new Set(['A', 'B']);
+    expect(setIsSubsetOf(setA, setB)).to.equal(false);
+  });
+});
diff --git a/src/jsutils/getBySet.ts b/src/jsutils/getBySet.ts
deleted file mode 100644
index 4ddabd3002..0000000000
--- a/src/jsutils/getBySet.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-import { isSameSet } from './isSameSet.js';
-
-export function getBySet<T, U>(
-  map: ReadonlyMap<ReadonlySet<T>, U>,
-  setToMatch: ReadonlySet<T>,
-): U | undefined {
-  for (const set of map.keys()) {
-    if (isSameSet(set, setToMatch)) {
-      return map.get(set);
-    }
-  }
-  return undefined;
-}
diff --git a/src/jsutils/setIsDisjointFrom.ts b/src/jsutils/setIsDisjointFrom.ts
new file mode 100644
index 0000000000..dce1881761
--- /dev/null
+++ b/src/jsutils/setIsDisjointFrom.ts
@@ -0,0 +1,26 @@
+/**
+ * Returns a boolean indicating if setA has no elements in common with setB.
+ *
+ * See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set/isDisjointFrom
+ *
+ */
+
+export function setIsDisjointFrom<T>(
+  setA: ReadonlySet<T>,
+  setB: ReadonlySet<T>,
+): boolean {
+  if (setA.size <= setB.size) {
+    for (const item of setA) {
+      if (setB.has(item)) {
+        return false;
+      }
+    }
+  } else {
+    for (const item of setB) {
+      if (setA.has(item)) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
diff --git a/src/jsutils/setIsSubsetOf.ts b/src/jsutils/setIsSubsetOf.ts
new file mode 100644
index 0000000000..076b8cf771
--- /dev/null
+++ b/src/jsutils/setIsSubsetOf.ts
@@ -0,0 +1,20 @@
+/**
+ * Returns a boolean indicating if all elements of setA are in setB.
+ *
+ * See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set/isSubsetOf
+ *
+ */
+export function setIsSubsetOf<T>(
+  setA: ReadonlySet<T>,
+  setB: ReadonlySet<T>,
+): boolean {
+  if (setA.size > setB.size) {
+    return false;
+  }
+  for (const item of setA) {
+    if (!setB.has(item)) {
+      return false;
+    }
+  }
+  return true;
+}
diff --git a/src/validation/rules/SingleFieldSubscriptionsRule.ts b/src/validation/rules/SingleFieldSubscriptionsRule.ts
index c0d1031103..66ea57edba 100644
--- a/src/validation/rules/SingleFieldSubscriptionsRule.ts
+++ b/src/validation/rules/SingleFieldSubscriptionsRule.ts
@@ -10,13 +10,15 @@ import type {
 import { Kind } from '../../language/kinds.js';
 import type { ASTVisitor } from '../../language/visitor.js';
 
-import type { FieldGroup } from '../../execution/collectFields.js';
+import type { FieldDetail } from '../../execution/collectFields.js';
 import { collectFields } from '../../execution/collectFields.js';
 
 import type { ValidationContext } from '../ValidationContext.js';
 
-function toNodes(fieldGroup: FieldGroup): ReadonlyArray<FieldNode> {
-  return fieldGroup.fields.map((fieldDetails) => fieldDetails.node);
+function toNodes(
+  fieldDetails: ReadonlyArray<FieldDetail>,
+): ReadonlyArray<FieldNode> {
+  return fieldDetails.map((fieldDetail) => fieldDetail.node);
 }
 
 /**
@@ -47,7 +49,7 @@ export function SingleFieldSubscriptionsRule(
               fragments[definition.name.value] = definition;
             }
           }
-          const { groupedFieldSet } = collectFields(
+          const groupedFieldSet = collectFields(
             schema,
             fragments,
             variableValues,
@@ -55,9 +57,9 @@
             node,
           );
           if (groupedFieldSet.size > 1) {
-            const fieldGroups = [...groupedFieldSet.values()];
-            const extraFieldGroups = fieldGroups.slice(1);
-            const extraFieldSelections = extraFieldGroups.flatMap(
+            const fieldSelectionLists = [...groupedFieldSet.values()];
+            const extraFieldSelectionLists = fieldSelectionLists.slice(1);
+            const extraFieldSelections = extraFieldSelectionLists.flatMap(
               (fieldGroup) => toNodes(fieldGroup),
             );
            context.reportError(
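A minimal usage sketch of the two helpers above (illustrative only, not part of the patch): it mirrors the per-response-key decisions made in getGroupedFieldSetsByTarget in execute.ts, with plain string sets standing in for the executor's DeferUsage objects. The variable names are invented for the example, and the import paths assume a scratch file placed next to the helpers in src/jsutils.

import { setIsDisjointFrom } from './setIsDisjointFrom.js';
import { setIsSubsetOf } from './setIsSubsetOf.js';

// Defer usages already active on the parent result vs. those collected for one response key.
const parentDeferUsages = new Set(['fragmentA']);
const fieldDeferUsages = new Set(['fragmentA', 'fragmentB']);

// A fully deferred field still executes with its parent group when every defer
// usage of the parent also applies to the field.
const executesWithParent =
  parentDeferUsages.size !== 0 &&
  setIsSubsetOf(parentDeferUsages, fieldDeferUsages);

// A separately grouped field initiates a brand-new defer only when its defer
// usages share nothing with the parent's.
const initiatesNewDefer = setIsDisjointFrom(fieldDeferUsages, parentDeferUsages);

console.log({ executesWithParent, initiatesNewDefer }); // { executesWithParent: true, initiatesNewDefer: false }

Both helpers only ever iterate the smaller of the two sets, so each check stays linear in the smaller set's size.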