diff --git a/build.gradle b/build.gradle index 89e65c14c1..0d183d4a89 100644 --- a/build.gradle +++ b/build.gradle @@ -90,7 +90,7 @@ def getDevelopmentVersion() { gitRevParse.waitForProcessOutput(gitRevParseOutput, gitRevParseError) def branchName = gitRevParseOutput.toString().trim().replaceAll('[/\\\\]', '-') - return makeDevelopmentVersion(["0.0.0", sanitizedBranchName, "SNAPSHOT"]) + return makeDevelopmentVersion(["0.0.0", branchName, "SNAPSHOT"]) } def reactiveStreamsVersion = '1.0.3' diff --git a/src/main/java/graphql/GraphQL.java b/src/main/java/graphql/GraphQL.java index 1441238b89..d3c7e45f9f 100644 --- a/src/main/java/graphql/GraphQL.java +++ b/src/main/java/graphql/GraphQL.java @@ -23,9 +23,12 @@ import graphql.execution.preparsed.NoOpPreparsedDocumentProvider; import graphql.execution.preparsed.PreparsedDocumentEntry; import graphql.execution.preparsed.PreparsedDocumentProvider; +import graphql.introspection.GoodFaithIntrospection; import graphql.language.Document; import graphql.schema.GraphQLSchema; +import graphql.validation.GoodFaithIntrospectionExceeded; import graphql.validation.OperationValidationRule; +import graphql.validation.QueryComplexityLimits; import graphql.validation.ValidationError; import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.NullUnmarked; @@ -567,7 +570,12 @@ private PreparsedDocumentEntry parseAndValidate(AtomicReference executionInput = executionInput.transform(builder -> builder.variables(parseResult.getVariables())); executionInputRef.set(executionInput); - final List errors = validate(executionInput, assertNotNull(document, "Document cannot be null when parse succeeded"), graphQLSchema, instrumentationState); + final List errors; + try { + errors = validate(executionInput, assertNotNull(document, "Document cannot be null when parse succeeded"), graphQLSchema, instrumentationState); + } catch (GoodFaithIntrospectionExceeded e) { + return new PreparsedDocumentEntry(document, 
List.of(e.toBadFaithError())); + } if (!errors.isEmpty()) { return new PreparsedDocumentEntry(document, errors); } @@ -601,7 +609,15 @@ private List validate(ExecutionInput executionInput, Document d Predicate validationRulePredicate = executionInput.getGraphQLContext().getOrDefault(ParseAndValidate.INTERNAL_VALIDATION_PREDICATE_HINT, r -> true); Locale locale = executionInput.getLocale(); - List validationErrors = ParseAndValidate.validate(graphQLSchema, document, validationRulePredicate, locale); + QueryComplexityLimits limits = executionInput.getGraphQLContext().get(QueryComplexityLimits.KEY); + + // Good Faith Introspection: disable the rule if good faith is off + if (!GoodFaithIntrospection.isEnabled(executionInput.getGraphQLContext())) { + Predicate existing = validationRulePredicate; + validationRulePredicate = rule -> rule != OperationValidationRule.GOOD_FAITH_INTROSPECTION && existing.test(rule); + } + + List validationErrors = ParseAndValidate.validate(graphQLSchema, document, validationRulePredicate, locale, limits); validationCtx.onCompleted(validationErrors, null); return validationErrors; diff --git a/src/main/java/graphql/ParseAndValidate.java b/src/main/java/graphql/ParseAndValidate.java index 12af6af3a8..a092f28fc0 100644 --- a/src/main/java/graphql/ParseAndValidate.java +++ b/src/main/java/graphql/ParseAndValidate.java @@ -7,10 +7,12 @@ import graphql.parser.ParserOptions; import graphql.schema.GraphQLSchema; import graphql.validation.OperationValidationRule; +import graphql.validation.QueryComplexityLimits; import graphql.validation.ValidationError; import graphql.validation.Validator; import org.jspecify.annotations.NonNull; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.List; import java.util.Locale; @@ -118,8 +120,23 @@ public static List validate(@NonNull GraphQLSchema graphQLSchem * @return a result object that indicates how this operation went */ public static List validate(@NonNull 
GraphQLSchema graphQLSchema, @NonNull Document parsedDocument, @NonNull Predicate rulePredicate, @NonNull Locale locale) { + return validate(graphQLSchema, parsedDocument, rulePredicate, locale, null); + } + + /** + * This can be called to validate a parsed graphql query. + * + * @param graphQLSchema the graphql schema to validate against + * @param parsedDocument the previously parsed document + * @param rulePredicate this predicate is used to decide what validation rules will be applied + * @param locale the current locale + * @param limits optional query complexity limits to enforce + * + * @return a result object that indicates how this operation went + */ + public static List validate(GraphQLSchema graphQLSchema, Document parsedDocument, Predicate rulePredicate, Locale locale, @Nullable QueryComplexityLimits limits) { Validator validator = new Validator(); - return validator.validateDocument(graphQLSchema, parsedDocument, rulePredicate, locale); + return validator.validateDocument(graphQLSchema, parsedDocument, rulePredicate, locale, limits); } /** diff --git a/src/main/java/graphql/introspection/GoodFaithIntrospection.java b/src/main/java/graphql/introspection/GoodFaithIntrospection.java index ae7da12569..2d40425cee 100644 --- a/src/main/java/graphql/introspection/GoodFaithIntrospection.java +++ b/src/main/java/graphql/introspection/GoodFaithIntrospection.java @@ -1,31 +1,19 @@ package graphql.introspection; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableListMultimap; import graphql.ErrorClassification; -import graphql.ExecutionResult; import graphql.GraphQLContext; import graphql.GraphQLError; import graphql.PublicApi; -import graphql.execution.AbortExecutionException; -import graphql.execution.ExecutionContext; import graphql.language.SourceLocation; -import graphql.normalized.ExecutableNormalizedField; -import graphql.normalized.ExecutableNormalizedOperation; -import graphql.schema.FieldCoordinates; +import 
graphql.validation.QueryComplexityLimits; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.List; -import java.util.Map; -import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; -import static graphql.normalized.ExecutableNormalizedOperationFactory.Options; -import static graphql.normalized.ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation; -import static graphql.schema.FieldCoordinates.coordinates; - /** - * This {@link graphql.execution.instrumentation.Instrumentation} ensure that a submitted introspection query is done in - * good faith. + * Good Faith Introspection ensures that introspection queries are not abused to cause denial of service. *

* There are attack vectors where a crafted introspection query can cause the engine to spend too much time * producing introspection data. This is especially true on large schemas with lots of types and fields. @@ -33,11 +21,18 @@ * Schemas form a cyclic graph and hence it's possible to send in introspection queries that can reference those cycles * and in large schemas this can be expensive and perhaps a "denial of service". *

- * This instrumentation only allows one __schema field or one __type field to be present, and it does not allow the `__Type` fields - * to form a cycle, i.e., that can only be present once. This allows the standard and common introspection queries to work - * so tooling such as graphiql can work. + * When enabled, the validation layer enforces that: + *

+ * <ul>
+ * <li>Only one {@code __schema} and one {@code __type} field can appear per operation</li>
+ * <li>The {@code __Type} fields {@code fields}, {@code inputFields}, {@code interfaces}, and {@code possibleTypes}
+ * can each only appear once (preventing cyclic traversals)</li>
+ * <li>The query complexity is limited to {@link #GOOD_FAITH_MAX_FIELDS_COUNT} fields and
+ * {@link #GOOD_FAITH_MAX_DEPTH_COUNT} depth</li>
+ * </ul>
+ * This allows the standard and common introspection queries to work so tooling such as graphiql can work. */ @PublicApi +@NullMarked public class GoodFaithIntrospection { /** @@ -74,67 +69,36 @@ public static boolean enabledJvmWide(boolean flag) { return ENABLED_STATE.getAndSet(flag); } - private static final Map ALLOWED_FIELD_INSTANCES = Map.of( - coordinates("Query", "__schema"), 1 - , coordinates("Query", "__type"), 1 - - , coordinates("__Type", "fields"), 1 - , coordinates("__Type", "inputFields"), 1 - , coordinates("__Type", "interfaces"), 1 - , coordinates("__Type", "possibleTypes"), 1 - ); - - public static Optional checkIntrospection(ExecutionContext executionContext) { - if (isIntrospectionEnabled(executionContext.getGraphQLContext())) { - ExecutableNormalizedOperation operation; - try { - operation = mkOperation(executionContext); - } catch (AbortExecutionException e) { - BadFaithIntrospectionError error = BadFaithIntrospectionError.tooBigOperation(e.getMessage()); - return Optional.of(ExecutionResult.newExecutionResult().addError(error).build()); - } - ImmutableListMultimap coordinatesToENFs = operation.getCoordinatesToNormalizedFields(); - for (Map.Entry entry : ALLOWED_FIELD_INSTANCES.entrySet()) { - FieldCoordinates coordinates = entry.getKey(); - Integer allowSize = entry.getValue(); - ImmutableList normalizedFields = coordinatesToENFs.get(coordinates); - if (normalizedFields.size() > allowSize) { - BadFaithIntrospectionError error = BadFaithIntrospectionError.tooManyFields(coordinates.toString()); - return Optional.of(ExecutionResult.newExecutionResult().addError(error).build()); - } - } - } - return Optional.empty(); - } - /** - * This makes an executable operation limited in size then which suits a good faith introspection query. This helps guard - * against malicious queries. + * Checks whether Good Faith Introspection is enabled for the given request context. 
* - * @param executionContext the execution context + * @param graphQLContext the per-request context * - * @return an executable operation + * @return true if good faith introspection checks should be applied */ - private static ExecutableNormalizedOperation mkOperation(ExecutionContext executionContext) throws AbortExecutionException { - Options options = Options.defaultOptions() - .maxFieldsCount(GOOD_FAITH_MAX_FIELDS_COUNT) - .maxChildrenDepth(GOOD_FAITH_MAX_DEPTH_COUNT) - .locale(executionContext.getLocale()) - .graphQLContext(executionContext.getGraphQLContext()); - - return createExecutableNormalizedOperation(executionContext.getGraphQLSchema(), - executionContext.getOperationDefinition(), - executionContext.getFragmentsByName(), - executionContext.getCoercedVariables(), - options); - - } - - private static boolean isIntrospectionEnabled(GraphQLContext graphQlContext) { + public static boolean isEnabled(GraphQLContext graphQLContext) { if (!isEnabledJvmWide()) { return false; } - return !graphQlContext.getBoolean(GOOD_FAITH_INTROSPECTION_DISABLED, false); + return !graphQLContext.getBoolean(GOOD_FAITH_INTROSPECTION_DISABLED, false); + } + + /** + * Returns query complexity limits that are the minimum of the existing limits and the + * good faith introspection limits. This ensures introspection queries are bounded + * without overriding tighter user-specified limits. 
+ * + * @param existing the existing complexity limits (must not be null; callers pass the already-resolved limits) + * + * @return complexity limits with good faith bounds applied + */ + public static QueryComplexityLimits goodFaithLimits(QueryComplexityLimits existing) { + int maxFields = Math.min(existing.getMaxFieldsCount(), GOOD_FAITH_MAX_FIELDS_COUNT); + int maxDepth = Math.min(existing.getMaxDepth(), GOOD_FAITH_MAX_DEPTH_COUNT); + return QueryComplexityLimits.newLimits() + .maxFieldsCount(maxFields) + .maxDepth(maxDepth) + .build(); } public static class BadFaithIntrospectionError implements GraphQLError { @@ -163,7 +127,7 @@ public ErrorClassification getErrorType() { } @Override - public List getLocations() { + public @Nullable List getLocations() { return null; } diff --git a/src/main/java/graphql/introspection/Introspection.java b/src/main/java/graphql/introspection/Introspection.java index a455ec9d78..e8c7173e68 100644 --- a/src/main/java/graphql/introspection/Introspection.java +++ b/src/main/java/graphql/introspection/Introspection.java @@ -116,7 +116,6 @@ public static boolean isEnabledJvmWide() { public static Optional isIntrospectionSensible(MergedSelectionSet mergedSelectionSet, ExecutionContext executionContext) { GraphQLContext graphQLContext = executionContext.getGraphQLContext(); - boolean isIntrospection = false; for (String key : mergedSelectionSet.getKeys()) { String fieldName = mergedSelectionSet.getSubField(key).getName(); if (fieldName.equals(SchemaMetaFieldDef.getName()) @@ -124,13 +123,9 @@ public static Optional isIntrospectionSensible(MergedSelectionS if (!isIntrospectionEnabled(graphQLContext)) { return mkDisabledError(mergedSelectionSet.getSubField(key)); } - isIntrospection = true; break; } } - if (isIntrospection) { - return GoodFaithIntrospection.checkIntrospection(executionContext); - } return Optional.empty(); } diff --git a/src/main/java/graphql/validation/FragmentComplexityInfo.java 
b/src/main/java/graphql/validation/FragmentComplexityInfo.java new file mode 100644 index 0000000000..ae04911e75 --- /dev/null +++ b/src/main/java/graphql/validation/FragmentComplexityInfo.java @@ -0,0 +1,44 @@ +package graphql.validation; + +import graphql.Internal; +import org.jspecify.annotations.NullMarked; + +/** + * Holds pre-calculated complexity metrics for a fragment definition. + * This is used to efficiently track query complexity when fragments are spread + * at multiple locations in a query. + */ +@Internal +@NullMarked +class FragmentComplexityInfo { + + private final int fieldCount; + private final int maxDepth; + + FragmentComplexityInfo(int fieldCount, int maxDepth) { + this.fieldCount = fieldCount; + this.maxDepth = maxDepth; + } + + /** + * @return the total number of fields in this fragment, including fields from nested fragments + */ + int getFieldCount() { + return fieldCount; + } + + /** + * @return the maximum depth of fields within this fragment + */ + int getMaxDepth() { + return maxDepth; + } + + @Override + public String toString() { + return "FragmentComplexityInfo{" + + "fieldCount=" + fieldCount + + ", maxDepth=" + maxDepth + + '}'; + } +} diff --git a/src/main/java/graphql/validation/GoodFaithIntrospectionExceeded.java b/src/main/java/graphql/validation/GoodFaithIntrospectionExceeded.java new file mode 100644 index 0000000000..8da4f09d4a --- /dev/null +++ b/src/main/java/graphql/validation/GoodFaithIntrospectionExceeded.java @@ -0,0 +1,45 @@ +package graphql.validation; + +import graphql.Internal; +import graphql.introspection.GoodFaithIntrospection; +import org.jspecify.annotations.NullMarked; + +/** + * Exception thrown when a good-faith introspection check fails during validation. + * This exception is NOT caught by the Validator — it propagates up to GraphQL.parseAndValidate() + * where it is converted to a {@link GoodFaithIntrospection.BadFaithIntrospectionError}. 
+ */ +@Internal +@NullMarked +public class GoodFaithIntrospectionExceeded extends RuntimeException { + + private final boolean tooBig; + private final String detail; + + private GoodFaithIntrospectionExceeded(boolean tooBig, String detail) { + super(detail); + this.tooBig = tooBig; + this.detail = detail; + } + + public static GoodFaithIntrospectionExceeded tooManyFields(String fieldCoordinate) { + return new GoodFaithIntrospectionExceeded(false, fieldCoordinate); + } + + public static GoodFaithIntrospectionExceeded tooBigOperation(String message) { + return new GoodFaithIntrospectionExceeded(true, message); + } + + public GoodFaithIntrospection.BadFaithIntrospectionError toBadFaithError() { + if (tooBig) { + return GoodFaithIntrospection.BadFaithIntrospectionError.tooBigOperation(detail); + } + return GoodFaithIntrospection.BadFaithIntrospectionError.tooManyFields(detail); + } + + @Override + public synchronized Throwable fillInStackTrace() { + // No stack trace for performance - this is a control flow exception + return this; + } +} diff --git a/src/main/java/graphql/validation/OperationValidationRule.java b/src/main/java/graphql/validation/OperationValidationRule.java index d5645aa5af..aa4214f0d9 100644 --- a/src/main/java/graphql/validation/OperationValidationRule.java +++ b/src/main/java/graphql/validation/OperationValidationRule.java @@ -184,4 +184,7 @@ public enum OperationValidationRule { /** Defer directive must not be used in subscription operations. Requires operation context. */ DEFER_DIRECTIVE_ON_VALID_OPERATION, + + /** Good faith introspection check. 
*/ + GOOD_FAITH_INTROSPECTION, } diff --git a/src/main/java/graphql/validation/OperationValidator.java b/src/main/java/graphql/validation/OperationValidator.java index 07e7e8ea74..27a1bfc18b 100644 --- a/src/main/java/graphql/validation/OperationValidator.java +++ b/src/main/java/graphql/validation/OperationValidator.java @@ -14,6 +14,8 @@ import graphql.execution.TypeFromAST; import graphql.execution.ValuesResolver; import graphql.i18n.I18nMsg; +import graphql.introspection.GoodFaithIntrospection; +import graphql.introspection.Introspection; import graphql.introspection.Introspection.DirectiveLocation; import graphql.language.Argument; import graphql.language.AstComparator; @@ -328,6 +330,20 @@ public class OperationValidator implements DocumentVisitor { // --- State: SubscriptionUniqueRootField --- private final FieldCollector fieldCollector = new FieldCollector(); + // --- State: Query Complexity Limits --- + private int fieldCount = 0; + private int currentFieldDepth = 0; + private int maxFieldDepthSeen = 0; + private QueryComplexityLimits complexityLimits; + // Fragment complexity calculated lazily during first spread + private final Map fragmentComplexityMap = new HashMap<>(); + // Max depth seen during current fragment traversal (for calculating fragment's internal depth) + private int fragmentTraversalMaxDepth = 0; + + // --- State: Good Faith Introspection --- + private final Map introspectionFieldCounts = new HashMap<>(); + private boolean introspectionQueryDetected = false; + // --- Track whether we're in a context where fragment spread rules should run --- // fragmentRetraversalDepth == 0 means we're NOT inside a manually-traversed fragment => run non-fragment-spread checks // operationScope means we're inside an operation => can trigger fragment traversal @@ -340,6 +356,7 @@ public OperationValidator(ValidationContext validationContext, ValidationErrorCo this.validationUtil = new ValidationUtil(); this.rulePredicate = rulePredicate; 
this.allRulesEnabled = detectAllRulesEnabled(rulePredicate); + this.complexityLimits = validationContext.getQueryComplexityLimits(); prepareFragmentSpreadsMap(); } @@ -388,6 +405,37 @@ private boolean shouldRunOperationScopedRules() { return operationScope; } + // ==================== Query Complexity Limit Helpers ==================== + + private void checkFieldCountLimit() { + if (fieldCount > complexityLimits.getMaxFieldsCount()) { + if (introspectionQueryDetected) { + throw GoodFaithIntrospectionExceeded.tooBigOperation( + "Query has " + fieldCount + " fields which exceeds maximum allowed " + complexityLimits.getMaxFieldsCount()); + } + throw new QueryComplexityLimitsExceeded( + ValidationErrorType.MaxQueryFieldsExceeded, + complexityLimits.getMaxFieldsCount(), + fieldCount); + } + } + + private void checkDepthLimit(int depth) { + if (depth > maxFieldDepthSeen) { + maxFieldDepthSeen = depth; + if (maxFieldDepthSeen > complexityLimits.getMaxDepth()) { + if (introspectionQueryDetected) { + throw GoodFaithIntrospectionExceeded.tooBigOperation( + "Query depth " + maxFieldDepthSeen + " exceeds maximum allowed depth " + complexityLimits.getMaxDepth()); + } + throw new QueryComplexityLimitsExceeded( + ValidationErrorType.MaxQueryDepthExceeded, + complexityLimits.getMaxDepth(), + maxFieldDepthSeen); + } + } + } + @Override public void enter(Node node, List ancestors) { validationContext.getTraversalContext().enter(node, ancestors); @@ -401,6 +449,17 @@ public void enter(Node node, List ancestors) { } else if (node instanceof VariableDefinition) { checkVariableDefinition((VariableDefinition) node); } else if (node instanceof Field) { + // Track complexity only during operation scope + if (operationScope) { + fieldCount++; + currentFieldDepth++; + checkFieldCountLimit(); + checkDepthLimit(currentFieldDepth); + // Track max depth during fragment traversal for storing later + if (fragmentRetraversalDepth > 0 && currentFieldDepth > fragmentTraversalMaxDepth) { + 
fragmentTraversalMaxDepth = currentFieldDepth; + } + } checkField((Field) node); } else if (node instanceof InlineFragment) { checkInlineFragment((InlineFragment) node); @@ -433,6 +492,10 @@ public void leave(Node node, List ancestors) { leaveSelectionSet(); } else if (node instanceof FragmentDefinition) { leaveFragmentDefinition(); + } else if (node instanceof Field) { + if (operationScope) { + currentFieldDepth--; + } } } @@ -554,6 +617,53 @@ private void checkField(Field field) { validateUniqueDirectiveNamesPerLocation(field, field.getDirectives()); } } + // Good Faith Introspection: runs during fragment spread traversal too (operationScope) + if (operationScope && isRuleEnabled(OperationValidationRule.GOOD_FAITH_INTROSPECTION)) { + checkGoodFaithIntrospection(field); + } + } + + // --- GoodFaithIntrospection --- + private void checkGoodFaithIntrospection(Field field) { + GraphQLCompositeType parentType = validationContext.getParentType(); + if (parentType == null) { + return; + } + String fieldName = field.getName(); + String key = null; + + // Check query-level introspection fields (__schema, __type). + // Only counted at the structural level (not during fragment traversal) to match ENO merging + // behavior where the same field from a direct selection and a fragment spread merge into one. + if (shouldRunDocumentLevelRules()) { + GraphQLObjectType queryType = validationContext.getSchema().getQueryType(); + if (parentType.getName().equals(queryType.getName())) { + if (Introspection.SchemaMetaFieldDef.getName().equals(fieldName) || Introspection.TypeMetaFieldDef.getName().equals(fieldName)) { + key = parentType.getName() + "." + fieldName; + if (!introspectionQueryDetected) { + introspectionQueryDetected = true; + complexityLimits = GoodFaithIntrospection.goodFaithLimits(complexityLimits); + } + } + } + } + + // Check __Type fields that can form cycles. 
+ // Counted during ALL traversals (including fragment spreads) because each occurrence + // at a different depth represents a separate cycle risk. + if (Introspection.__Type.getName().equals(parentType.getName())) { + if ("fields".equals(fieldName) || "inputFields".equals(fieldName) + || "interfaces".equals(fieldName) || "possibleTypes".equals(fieldName)) { + key = "__Type." + fieldName; + } + } + + if (key != null) { + int count = introspectionFieldCounts.merge(key, 1, Integer::sum); + if (count > 1) { + throw GoodFaithIntrospectionExceeded.tooManyFields(key); + } + } } private void checkInlineFragment(InlineFragment inlineFragment) { @@ -611,14 +721,50 @@ private void checkFragmentSpread(FragmentSpread node, List ancestors) { } } - // Manually traverse into fragment definition during operation scope + // Handle complexity tracking and fragment traversal if (operationScope) { - FragmentDefinition fragment = validationContext.getFragment(node.getName()); - if (fragment != null && !visitedFragmentSpreads.contains(node.getName())) { - visitedFragmentSpreads.add(node.getName()); + String fragmentName = node.getName(); + FragmentDefinition fragment = validationContext.getFragment(fragmentName); + + if (visitedFragmentSpreads.contains(fragmentName)) { + // Subsequent spread - add stored complexity (don't traverse again) + FragmentComplexityInfo info = fragmentComplexityMap.get(fragmentName); + if (info != null) { + fieldCount += info.getFieldCount(); + checkFieldCountLimit(); + int potentialDepth = currentFieldDepth + info.getMaxDepth(); + checkDepthLimit(potentialDepth); + // Update max depth if we're inside a fragment traversal + if (fragmentRetraversalDepth > 0 && potentialDepth > fragmentTraversalMaxDepth) { + fragmentTraversalMaxDepth = potentialDepth; + } + } + } else if (fragment != null) { + // First spread - traverse and track complexity + visitedFragmentSpreads.add(fragmentName); + + int fieldCountBefore = fieldCount; + int depthAtEntry = currentFieldDepth; + 
int previousFragmentMaxDepth = fragmentTraversalMaxDepth; + + // Initialize max depth tracking for this fragment + fragmentTraversalMaxDepth = currentFieldDepth; + fragmentRetraversalDepth++; new LanguageTraversal(ancestors).traverse(fragment, this); fragmentRetraversalDepth--; + + // Calculate and store fragment complexity + int fragmentFieldCount = fieldCount - fieldCountBefore; + int fragmentMaxInternalDepth = fragmentTraversalMaxDepth - depthAtEntry; + + fragmentComplexityMap.put(fragmentName, + new FragmentComplexityInfo(fragmentFieldCount, fragmentMaxInternalDepth)); + + // Restore max depth for outer fragment (if nested) + if (fragmentRetraversalDepth > 0 && previousFragmentMaxDepth > fragmentTraversalMaxDepth) { + fragmentTraversalMaxDepth = previousFragmentMaxDepth; + } } } } @@ -724,6 +870,13 @@ private void leaveOperationDefinition() { } } } + + // Reset complexity counters for next operation + fieldCount = 0; + currentFieldDepth = 0; + maxFieldDepthSeen = 0; + fragmentTraversalMaxDepth = 0; + introspectionFieldCounts.clear(); } private void leaveSelectionSet() { diff --git a/src/main/java/graphql/validation/QueryComplexityLimits.java b/src/main/java/graphql/validation/QueryComplexityLimits.java new file mode 100644 index 0000000000..f46020cedf --- /dev/null +++ b/src/main/java/graphql/validation/QueryComplexityLimits.java @@ -0,0 +1,171 @@ +package graphql.validation; + +import graphql.PublicApi; +import org.jspecify.annotations.NullMarked; + +/** + * Configuration class for query complexity limits enforced during validation. + * This provides a lightweight alternative to ExecutableNormalizedOperation (ENO) for tracking + * query depth and field count. + * + *

By default, validation enforces limits (maxDepth=100, maxFieldsCount=100000). + * To customize limits per-request, put a custom instance in the GraphQLContext: + *

{@code
+ * QueryComplexityLimits limits = QueryComplexityLimits.newLimits()
+ *     .maxDepth(10)
+ *     .maxFieldsCount(100)
+ *     .build();
+ *
+ * ExecutionInput executionInput = ExecutionInput.newExecutionInput()
+ *     .query(query)
+ *     .graphQLContext(ctx -> ctx.put(QueryComplexityLimits.KEY, limits))
+ *     .build();
+ * }
+ * + *

To disable limits for a request, use {@link #NONE}: + *

{@code
+ * executionInput.getGraphQLContext().put(QueryComplexityLimits.KEY, QueryComplexityLimits.NONE);
+ * }
+ * + *

To change the default limits globally (e.g., for testing), use {@link #setDefaultLimits(QueryComplexityLimits)}: + *

{@code
+ * QueryComplexityLimits.setDefaultLimits(QueryComplexityLimits.NONE); // disable for tests
+ * }
+ */ +@PublicApi +@NullMarked +public class QueryComplexityLimits { + + /** + * Default maximum query depth. + */ + public static final int DEFAULT_MAX_DEPTH = 100; + + /** + * Default maximum field count. + */ + public static final int DEFAULT_MAX_FIELDS_COUNT = 100_000; + + /** + * The key used to store QueryComplexityLimits in GraphQLContext. + */ + public static final String KEY = "graphql.validation.QueryComplexityLimits"; + + /** + * Standard limits (maxDepth=100, maxFieldsCount=100000). + */ + public static final QueryComplexityLimits DEFAULT = new QueryComplexityLimits(DEFAULT_MAX_DEPTH, DEFAULT_MAX_FIELDS_COUNT); + + /** + * No limits (all limits set to Integer.MAX_VALUE). Use this to disable complexity checking. + */ + public static final QueryComplexityLimits NONE = new QueryComplexityLimits(Integer.MAX_VALUE, Integer.MAX_VALUE); + + private static volatile QueryComplexityLimits defaultLimits = DEFAULT; + + /** + * Sets the default limits used when no limits are specified in GraphQLContext. + * This is useful for testing or for applications that want different global defaults. + * + * @param limits the default limits to use (use {@link #NONE} to disable, {@link #DEFAULT} to restore) + */ + public static void setDefaultLimits(QueryComplexityLimits limits) { + defaultLimits = limits; + } + + /** + * Returns the current default limits. 
+ * + * @return the default limits + */ + public static QueryComplexityLimits getDefaultLimits() { + return defaultLimits; + } + + private final int maxDepth; + private final int maxFieldsCount; + + private QueryComplexityLimits(int maxDepth, int maxFieldsCount) { + this.maxDepth = maxDepth; + this.maxFieldsCount = maxFieldsCount; + } + + /** + * @return the maximum allowed depth for queries, where depth is measured as the number of nested Field nodes + */ + public int getMaxDepth() { + return maxDepth; + } + + /** + * @return the maximum allowed number of fields in a query, counting fields at each fragment spread site + */ + public int getMaxFieldsCount() { + return maxFieldsCount; + } + + /** + * @return a new builder for creating QueryComplexityLimits + */ + public static Builder newLimits() { + return new Builder(); + } + + @Override + public String toString() { + return "QueryComplexityLimits{" + + "maxDepth=" + maxDepth + + ", maxFieldsCount=" + maxFieldsCount + + '}'; + } + + /** + * Builder for QueryComplexityLimits. + */ + @PublicApi + @NullMarked + public static class Builder { + private int maxDepth = Integer.MAX_VALUE; + private int maxFieldsCount = Integer.MAX_VALUE; + + private Builder() { + } + + /** + * Sets the maximum allowed depth for queries. + * Depth is measured as the number of nested Field nodes. + * + * @param maxDepth the maximum depth (must be positive) + * @return this builder + */ + public Builder maxDepth(int maxDepth) { + if (maxDepth <= 0) { + throw new IllegalArgumentException("maxDepth must be positive"); + } + this.maxDepth = maxDepth; + return this; + } + + /** + * Sets the maximum allowed number of fields in a query. + * Fields inside fragments are counted at each spread site. 
+ * + * @param maxFieldsCount the maximum field count (must be positive) + * @return this builder + */ + public Builder maxFieldsCount(int maxFieldsCount) { + if (maxFieldsCount <= 0) { + throw new IllegalArgumentException("maxFieldsCount must be positive"); + } + this.maxFieldsCount = maxFieldsCount; + return this; + } + + /** + * @return a new QueryComplexityLimits instance + */ + public QueryComplexityLimits build() { + return new QueryComplexityLimits(maxDepth, maxFieldsCount); + } + } +} diff --git a/src/main/java/graphql/validation/QueryComplexityLimitsExceeded.java b/src/main/java/graphql/validation/QueryComplexityLimitsExceeded.java new file mode 100644 index 0000000000..adaffba2af --- /dev/null +++ b/src/main/java/graphql/validation/QueryComplexityLimitsExceeded.java @@ -0,0 +1,42 @@ +package graphql.validation; + +import graphql.Internal; +import org.jspecify.annotations.NullMarked; + +/** + * Exception thrown when query complexity limits (depth or field count) are exceeded during validation. + * This exception is caught by the Validator and converted to a ValidationError. 
+ */ +@Internal +@NullMarked +public class QueryComplexityLimitsExceeded extends RuntimeException { + + private final ValidationErrorType errorType; + private final int limit; + private final int actual; + + public QueryComplexityLimitsExceeded(ValidationErrorType errorType, int limit, int actual) { + super(errorType.name() + ": limit=" + limit + ", actual=" + actual); + this.errorType = errorType; + this.limit = limit; + this.actual = actual; + } + + public ValidationErrorType getErrorType() { + return errorType; + } + + public int getLimit() { + return limit; + } + + public int getActual() { + return actual; + } + + @Override + public synchronized Throwable fillInStackTrace() { + // No stack trace for performance - this is a control flow exception + return this; + } +} diff --git a/src/main/java/graphql/validation/ValidationContext.java b/src/main/java/graphql/validation/ValidationContext.java index 873783785f..9440931027 100644 --- a/src/main/java/graphql/validation/ValidationContext.java +++ b/src/main/java/graphql/validation/ValidationContext.java @@ -33,13 +33,19 @@ public class ValidationContext { private final Map fragmentDefinitionMap = new LinkedHashMap<>(); private final I18n i18n; private final GraphQLContext graphQLContext; + private final QueryComplexityLimits queryComplexityLimits; public ValidationContext(GraphQLSchema schema, Document document, I18n i18n) { + this(schema, document, i18n, null); + } + + public ValidationContext(GraphQLSchema schema, Document document, I18n i18n, @Nullable QueryComplexityLimits limits) { this.schema = schema; this.document = document; this.traversalContext = new TraversalContext(schema); this.i18n = i18n; this.graphQLContext = GraphQLContext.newContext().of(Locale.class, i18n.getLocale()).build(); + this.queryComplexityLimits = limits != null ? 
limits : QueryComplexityLimits.getDefaultLimits(); buildFragmentMap(); } @@ -109,6 +115,10 @@ public GraphQLContext getGraphQLContext() { return graphQLContext; } + public QueryComplexityLimits getQueryComplexityLimits() { + return queryComplexityLimits; + } + /** * Creates an I18N message using the key and arguments * diff --git a/src/main/java/graphql/validation/ValidationErrorType.java b/src/main/java/graphql/validation/ValidationErrorType.java index 59d5c3ac0f..afd75cda54 100644 --- a/src/main/java/graphql/validation/ValidationErrorType.java +++ b/src/main/java/graphql/validation/ValidationErrorType.java @@ -45,5 +45,7 @@ public enum ValidationErrorType implements ValidationErrorClassification { SubscriptionMultipleRootFields, SubscriptionIntrospectionRootField, UniqueObjectFieldName, - UnknownOperation + UnknownOperation, + MaxQueryDepthExceeded, + MaxQueryFieldsExceeded } diff --git a/src/main/java/graphql/validation/Validator.java b/src/main/java/graphql/validation/Validator.java index 654eec5cef..d6237bf131 100644 --- a/src/main/java/graphql/validation/Validator.java +++ b/src/main/java/graphql/validation/Validator.java @@ -37,8 +37,12 @@ public List validateDocument(GraphQLSchema schema, Document doc } public List validateDocument(GraphQLSchema schema, Document document, Predicate rulePredicate, Locale locale) { + return validateDocument(schema, document, rulePredicate, locale, null); + } + + public List validateDocument(GraphQLSchema schema, Document document, Predicate rulePredicate, Locale locale, QueryComplexityLimits limits) { I18n i18n = I18n.i18n(I18n.BundleType.Validation, locale); - ValidationContext validationContext = new ValidationContext(schema, document, i18n); + ValidationContext validationContext = new ValidationContext(schema, document, i18n, limits); ValidationErrorCollector validationErrorCollector = new ValidationErrorCollector(MAX_VALIDATION_ERRORS); OperationValidator operationValidator = new OperationValidator(validationContext, 
validationErrorCollector, rulePredicate); @@ -47,6 +51,12 @@ public List validateDocument(GraphQLSchema schema, Document doc languageTraversal.traverse(document, operationValidator); } catch (ValidationErrorCollector.MaxValidationErrorsReached ignored) { // if we have generated enough errors, then we can shortcut out + } catch (QueryComplexityLimitsExceeded e) { + String message = i18n.msg(e.getErrorType().name() + ".message", e.getLimit(), e.getActual()); + validationErrorCollector.addError(ValidationError.newValidationError() + .validationErrorType(e.getErrorType()) + .description(message) + .build()); } return validationErrorCollector.getErrors(); diff --git a/src/main/resources/i18n/Validation.properties b/src/main/resources/i18n/Validation.properties index a9403bea5b..8f2ad5715c 100644 --- a/src/main/resources/i18n/Validation.properties +++ b/src/main/resources/i18n/Validation.properties @@ -110,4 +110,7 @@ ArgumentValidationUtil.handleMissingFieldsError=Validation error ({0}) : argumen ArgumentValidationUtil.handleExtraFieldError=Validation error ({0}) : argument ''{1}'' with value ''{2}'' contains a field not in ''{3}'': ''{4}'' # suppress inspection "UnusedProperty" # suppress inspection "UnusedMessageFormatParameter" -ArgumentValidationUtil.extraOneOfFieldsError=Validation error ({0}) : Exactly one key must be specified for OneOf type ''{3}''. \ No newline at end of file +ArgumentValidationUtil.extraOneOfFieldsError=Validation error ({0}) : Exactly one key must be specified for OneOf type ''{3}''. 
+# Query complexity limit validation messages: {0} = configured limit, {1} = actual value +MaxQueryDepthExceeded.message=Query depth {1} exceeds maximum allowed depth {0} +MaxQueryFieldsExceeded.message=Query has {1} fields which exceeds maximum allowed {0} \ No newline at end of file diff --git a/src/test/groovy/graphql/archunit/JSpecifyAnnotationsCheck.groovy b/src/test/groovy/graphql/archunit/JSpecifyAnnotationsCheck.groovy index 3b3ddd5e4e..beca6436b9 100644 --- a/src/test/groovy/graphql/archunit/JSpecifyAnnotationsCheck.groovy +++ b/src/test/groovy/graphql/archunit/JSpecifyAnnotationsCheck.groovy @@ -86,7 +86,6 @@ class JSpecifyAnnotationsCheck extends Specification { "graphql.incremental.IncrementalExecutionResultImpl", "graphql.incremental.IncrementalPayload", "graphql.incremental.StreamPayload", - "graphql.introspection.GoodFaithIntrospection", "graphql.introspection.Introspection", "graphql.introspection.IntrospectionQuery", "graphql.introspection.IntrospectionQueryBuilder", diff --git a/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy b/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy index c2d9b2dc87..9da21c42be 100644 --- a/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy +++ b/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy @@ -2,10 +2,13 @@ package graphql.introspection import graphql.ExecutionInput import graphql.ExecutionResult +import graphql.ParseAndValidate import graphql.TestUtil import graphql.execution.CoercedVariables import graphql.language.Document import graphql.normalized.ExecutableNormalizedOperationFactory +import graphql.validation.OperationValidationRule +import graphql.validation.QueryComplexityLimits import spock.lang.Specification class GoodFaithIntrospectionTest extends Specification { @@ -142,6 +145,24 @@ class GoodFaithIntrospectionTest extends Specification { er.errors.isEmpty() } + def "disabling good faith composes with custom validation rule predicates"() { + given: + // Custom predicate that disables a specific rule + 
def customPredicate = { OperationValidationRule rule -> rule != OperationValidationRule.KNOWN_ARGUMENT_NAMES } as java.util.function.Predicate + + when: + def context = [ + (GoodFaithIntrospection.GOOD_FAITH_INTROSPECTION_DISABLED) : true, + (ParseAndValidate.INTERNAL_VALIDATION_PREDICATE_HINT) : customPredicate + ] + ExecutionInput executionInput = ExecutionInput.newExecutionInput("{ normalField }") + .graphQLContext(context).build() + ExecutionResult er = graphql.execute(executionInput) + + then: + er.errors.isEmpty() + } + def "can be disabled per request"() { when: def context = [(GoodFaithIntrospection.GOOD_FAITH_INTROSPECTION_DISABLED): true] @@ -188,6 +209,100 @@ class GoodFaithIntrospectionTest extends Specification { 100 | GoodFaithIntrospection.BadFaithIntrospectionError.class } + def "introspection via inline fragment on Query is detected as bad faith"() { + def query = """ + query badActor { + ...on Query { + __schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}} + } + } + """ + + when: + ExecutionResult er = graphql.execute(query) + + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + } + + def "introspection via fragment spread is detected as bad faith"() { + def query = """ + query badActor { + ...IntrospectionFragment + } + fragment IntrospectionFragment on Query { + __schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}} + } + """ + + when: + ExecutionResult er = graphql.execute(query) + + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + } + + def "good faith limits are applied on top of custom user limits"() { + given: + def limits = QueryComplexityLimits.newLimits().maxFieldsCount(200).maxDepth(15).build() + def executionInput = ExecutionInput.newExecutionInput(IntrospectionQuery.INTROSPECTION_QUERY) + .graphQLContext([(QueryComplexityLimits.KEY): limits]) + .build() + + when: + 
ExecutionResult er = graphql.execute(executionInput) + + then: + er.errors.isEmpty() + } + + def "introspection query exceeding field count limit is detected as bad faith"() { + given: + // Build a wide introspection query that exceeds GOOD_FAITH_MAX_FIELDS_COUNT (500) + // using non-cycle-forming fields (aliases of 'name') so the tooManyFields check + // does not fire first, exercising the tooBigOperation code path instead + def sb = new StringBuilder() + sb.append("query { __schema { types { ") + for (int i = 0; i < 510; i++) { + sb.append("a${i}: name ") + } + sb.append("} } }") + + when: + ExecutionResult er = graphql.execute(sb.toString()) + + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + er.errors[0].message.contains("too big") + } + + def "introspection query exceeding depth limit is detected as bad faith"() { + given: + // Build a deep introspection query using ofType (not a cycle-forming field) + // that exceeds GOOD_FAITH_MAX_DEPTH_COUNT (20) + def sb = new StringBuilder() + sb.append("query { __schema { types { ") + for (int i = 0; i < 20; i++) { + sb.append("ofType { ") + } + sb.append("name ") + for (int i = 0; i < 20; i++) { + sb.append("} ") + } + sb.append("} } }") + + when: + ExecutionResult er = graphql.execute(sb.toString()) + + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + er.errors[0].message.contains("too big") + } + String createDeepQuery(int depth = 25) { def result = """ query test { diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy index a04c74f954..7463031d49 100644 --- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy @@ -20,6 +20,7 @@ import 
graphql.util.TraversalControl import graphql.util.Traverser import graphql.util.TraverserContext import graphql.util.TraverserVisitorStub +import graphql.validation.QueryComplexityLimits import spock.lang.Specification import java.util.stream.Collectors @@ -33,6 +34,14 @@ import static graphql.schema.FieldCoordinates.coordinates class ExecutableNormalizedOperationFactoryTest extends Specification { static boolean deferSupport + def setup() { + // Disable validation complexity limits so ENO limits can be tested + QueryComplexityLimits.setDefaultLimits(QueryComplexityLimits.NONE) + } + + def cleanup() { + QueryComplexityLimits.setDefaultLimits(QueryComplexityLimits.DEFAULT) + } def "test"() { String schema = """ diff --git a/src/test/groovy/graphql/validation/OverlappingFieldsCanBeMergedBenchmarkTest.groovy b/src/test/groovy/graphql/validation/OverlappingFieldsCanBeMergedBenchmarkTest.groovy index 1e2cdbbe33..af09868cc6 100644 --- a/src/test/groovy/graphql/validation/OverlappingFieldsCanBeMergedBenchmarkTest.groovy +++ b/src/test/groovy/graphql/validation/OverlappingFieldsCanBeMergedBenchmarkTest.groovy @@ -53,7 +53,7 @@ class OverlappingFieldsCanBeMergedBenchmarkTest extends Specification { private List validateQuery(GraphQLSchema schema, Document document) { ValidationErrorCollector errorCollector = new ValidationErrorCollector() I18n i18n = I18n.i18n(I18n.BundleType.Validation, Locale.ENGLISH) - ValidationContext validationContext = new ValidationContext(schema, document, i18n) + ValidationContext validationContext = new ValidationContext(schema, document, i18n, QueryComplexityLimits.NONE) OperationValidator operationValidator = new OperationValidator(validationContext, errorCollector, { r -> r == OperationValidationRule.OVERLAPPING_FIELDS_CAN_BE_MERGED }) LanguageTraversal languageTraversal = new LanguageTraversal() @@ -74,7 +74,11 @@ class OverlappingFieldsCanBeMergedBenchmarkTest extends Specification { def "large schema query executes without errors"() { when: 
GraphQL graphQL = GraphQL.newGraphQL(schema).build() - ExecutionResult executionResult = graphQL.execute(loadResource("large-schema-4-query.graphql")) + def executionInput = graphql.ExecutionInput.newExecutionInput() + .query(loadResource("large-schema-4-query.graphql")) + .graphQLContext([(QueryComplexityLimits.KEY): QueryComplexityLimits.NONE]) + .build() + ExecutionResult executionResult = graphQL.execute(executionInput) then: executionResult.errors.size() == 0 diff --git a/src/test/groovy/graphql/validation/QueryComplexityLimitsTest.groovy b/src/test/groovy/graphql/validation/QueryComplexityLimitsTest.groovy new file mode 100644 index 0000000000..ece8cde7a4 --- /dev/null +++ b/src/test/groovy/graphql/validation/QueryComplexityLimitsTest.groovy @@ -0,0 +1,458 @@ +package graphql.validation + +import graphql.TestUtil +import graphql.parser.Parser + +class QueryComplexityLimitsTest extends SpecValidationBase { + + // ==================== ENO Parity Tests ==================== + // These tests verify that our complexity tracking matches ExecutableNormalizedOperation (ENO) + + def "ENO parity - depth and field count match ENO calculation"() { + // This test mirrors ExecutableNormalizedOperationFactoryTest."can capture depth and field count" + // ENO reports: depth=7, fieldCount=8 + def schema = TestUtil.schema(""" + type Query { + foo: Foo + } + type Foo { + stop : String + bar : Bar + } + type Bar { + stop : String + foo : Foo + } + """) + + def query = "{ foo { bar { foo { bar { foo { stop bar { stop }}}}}}}" + def document = new Parser().parseDocument(query) + + when: "we set limits that would fail if counts don't match ENO" + // ENO says fieldCount=8, so limit of 7 should fail + def limitsFieldCount = QueryComplexityLimits.newLimits() + .maxFieldsCount(7) + .build() + def errorsFieldCount = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limitsFieldCount) + + then: "field count of 8 exceeds limit of 7" + errorsFieldCount.size() 
== 1 + errorsFieldCount[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + errorsFieldCount[0].message.contains("8") + + when: "we set limits that match ENO exactly" + def limitsExact = QueryComplexityLimits.newLimits() + .maxFieldsCount(8) + .maxDepth(7) + .build() + def errorsExact = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limitsExact) + + then: "validation passes with exact ENO counts" + errorsExact.isEmpty() + + when: "depth limit of 6 should fail (ENO says depth=7)" + def limitsDepth = QueryComplexityLimits.newLimits() + .maxDepth(6) + .build() + def errorsDepth = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limitsDepth) + + then: "depth of 7 exceeds limit of 6" + errorsDepth.size() == 1 + errorsDepth[0].validationErrorType == ValidationErrorType.MaxQueryDepthExceeded + errorsDepth[0].message.contains("7") + } + + def "ENO parity - fragment spread counts fields at each site"() { + // This test mirrors ExecutableNormalizedOperationFactoryTest."query with fragment definition" + // Query: {foo { ...fooData moreFoos { ...fooData }}} fragment fooData on Foo { subFoo } + // ENO output: ['Query.foo', 'Foo.subFoo', 'Foo.moreFoos', 'Foo.subFoo'] + // So subFoo is counted TWICE (once per spread) = 4 total fields + def schema = TestUtil.schema(""" + type Query { + foo: Foo + } + type Foo { + subFoo: String + moreFoos: Foo + } + """) + + def query = "{foo { ...fooData moreFoos { ...fooData }}} fragment fooData on Foo { subFoo }" + def document = new Parser().parseDocument(query) + + when: "limit of 3 should fail (ENO counts 4 fields)" + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(3) + .build() + def errors = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + errors.size() == 1 + errors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + errors[0].message.contains("4") // foo + subFoo + 
moreFoos + subFoo = 4 + + when: "limit of 4 should pass" + def limitsPass = QueryComplexityLimits.newLimits() + .maxFieldsCount(4) + .build() + def errorsPass = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limitsPass) + + then: + errorsPass.isEmpty() + } + + def "ENO parity - deeply nested fragments multiply field counts"() { + // Similar to ExecutableNormalizedOperationFactoryTest."factory has a default max node count" + // Each fragment spreads 3 times, creating exponential growth + def schema = TestUtil.schema(""" + type Query { + foo: Foo + } + type Foo { + foo: Foo + name: String + } + """) + + // F1 spreads F2 three times, F2 has just 'name' + // F1 contributes: 3 * F2's fields = 3 * 1 = 3 fields + // Query: foo + F1's fields = 1 + 3 = 4 fields + def query = """ + { foo { ...F1 }} + fragment F1 on Foo { + a: foo { ...F2 } + b: foo { ...F2 } + c: foo { ...F2 } + } + fragment F2 on Foo { + name + } + """ + def document = new Parser().parseDocument(query) + + when: + // foo (1) + a:foo (1) + b:foo (1) + c:foo (1) + name*3 (3) = 7 fields + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(6) + .build() + def errors = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + errors.size() == 1 + errors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + errors[0].message.contains("7") + + when: "limit of 7 should pass" + def limitsPass = QueryComplexityLimits.newLimits() + .maxFieldsCount(7) + .build() + def errorsPass = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, limitsPass) + + then: + errorsPass.isEmpty() + } + + // ==================== Original Tests ==================== + + def "default limits are applied automatically"() { + expect: + QueryComplexityLimits.DEFAULT.getMaxDepth() == 100 + QueryComplexityLimits.DEFAULT.getMaxFieldsCount() == 100_000 + QueryComplexityLimits.getDefaultLimits() == 
QueryComplexityLimits.DEFAULT + } + + def "default limits can be changed globally"() { + given: + def originalDefault = QueryComplexityLimits.getDefaultLimits() + + when: "we set custom default limits" + def customLimits = QueryComplexityLimits.newLimits().maxDepth(5).maxFieldsCount(10).build() + QueryComplexityLimits.setDefaultLimits(customLimits) + + then: + QueryComplexityLimits.getDefaultLimits() == customLimits + + when: "we can disable limits globally with NONE" + QueryComplexityLimits.setDefaultLimits(QueryComplexityLimits.NONE) + + then: + QueryComplexityLimits.getDefaultLimits() == QueryComplexityLimits.NONE + + cleanup: + QueryComplexityLimits.setDefaultLimits(originalDefault) + } + + def "simple queries pass with default limits"() { + def query = """ + query deepQuery { + dog { + name + owner { + name + } + } + } + """ + when: + def validationErrors = validate(query) + + then: + validationErrors.isEmpty() + } + + def "NONE disables limits entirely"() { + def schema = TestUtil.schema(""" + type Query { a: A } + type A { b: B } + type B { c: C } + type C { d: String } + """) + // This query (depth 4) is well within the default limits; the point of this test
+ // is simply that validating with QueryComplexityLimits.NONE enforces no limits at all + def query = "{ a { b { c { d }}}}" + def document = new Parser().parseDocument(query) + + when: "using NONE, no limits are enforced" + def errors = new Validator().validateDocument(schema, document, { r -> true }, Locale.ENGLISH, QueryComplexityLimits.NONE) + + then: + errors.isEmpty() + } + + def "field count limit is enforced"() { + def query = """ + query { + dog { + name + nickname + barkVolume + } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(3) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + validationErrors[0].message.contains("4") // actual + validationErrors[0].message.contains("3") // limit + } + + def "depth limit is enforced"() { + def query = """ + query { + dog { + owner { + name + } + } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxDepth(2) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryDepthExceeded + validationErrors[0].message.contains("3") // actual depth + validationErrors[0].message.contains("2") // limit + } + + def "fragment fields are counted at each spread site"() { + // Fragment F has 2 fields (name, nickname) + // Query has: dog1, dog2, dog3 = 3 fields + 3 spreads * 2 fields = 9 total fields + def query = """ + fragment F on Dog { name nickname } + query { + dog1: dog { ...F } + dog2: dog { ...F } + dog3: dog 
{ ...F } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(8) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + validationErrors[0].message.contains("9") // actual + validationErrors[0].message.contains("8") // limit + } + + def "fragment depth adds to current depth"() { + // Query depth: dog at depth 1, fragment adds 1 more (name) = max depth 2 + def query = """ + fragment F on Dog { name } + query { + dog { ...F } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxDepth(1) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryDepthExceeded + } + + def "nested fragments are handled correctly"() { + // Fragment A spreads fragment B, each has 1 field + // Total: dog (1) + A's name (1) + B's nickname (1) = 3 fields + def query = """ + fragment A on Dog { name ...B } + fragment B on Dog { nickname } + query { + dog { ...A } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(2) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + } + + def "multiple operations each have separate limits"() { + // Each operation should be validated 
independently + def query = """ + query First { + dog { name } + } + query Second { + dog { name nickname barkVolume } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(3) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + // Second operation has 4 fields (dog + 3 scalar fields), which exceeds limit of 3 + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + } + + def "inline fragments count their fields"() { + def query = """ + query { + dog { + ... on Dog { + name + nickname + } + } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(2) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + // dog (1) + name (1) + nickname (1) = 3 fields + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.MaxQueryFieldsExceeded + } + + def "passes when within limits"() { + def query = """ + query { + dog { + name + owner { + name + } + } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(10) + .maxDepth(5) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + validationErrors.isEmpty() + } + + def "QueryComplexityLimits.NONE has no limits"() { + expect: + QueryComplexityLimits.NONE.getMaxDepth() == Integer.MAX_VALUE + QueryComplexityLimits.NONE.getMaxFieldsCount() == Integer.MAX_VALUE + } + + def "builder validates positive values"() { + when: + 
QueryComplexityLimits.newLimits().maxDepth(0).build() + + then: + thrown(IllegalArgumentException) + + when: + QueryComplexityLimits.newLimits().maxFieldsCount(-1).build() + + then: + thrown(IllegalArgumentException) + } + + def "cyclic fragments don't cause infinite loop in complexity calculation"() { + // This query has a cycle: A -> B -> A + // The validation should detect the cycle error, but complexity calculation shouldn't hang + def query = """ + fragment A on Dog { ...B } + fragment B on Dog { ...A } + query { + dog { ...A } + } + """ + when: + def limits = QueryComplexityLimits.newLimits() + .maxFieldsCount(100) + .maxDepth(100) + .build() + def document = new Parser().parseDocument(query) + def validationErrors = new Validator().validateDocument( + SpecValidationSchema.specValidationSchema, document, { r -> true }, Locale.ENGLISH, limits) + + then: + // Should get fragment cycle error, not hang + validationErrors.any { it.validationErrorType == ValidationErrorType.FragmentCycle } + } +}