diff --git a/build.gradle b/build.gradle
index 7bfc69a09e..4229857164 100644
--- a/build.gradle
+++ b/build.gradle
@@ -156,6 +156,7 @@ dependencies {
// this is needed for the idea jmh plugin to work correctly
jmh 'org.openjdk.jmh:jmh-core:1.37'
jmh 'org.openjdk.jmh:jmh-generator-annprocess:1.37'
+ jmh 'me.bechberger:ap-loader-all:4.0-10'
// comment this in if you want to run JMH benchmarks from idea
// jmhAnnotationProcessor 'org.openjdk.jmh:jmh-generator-annprocess:1.37'
@@ -228,7 +229,38 @@ jmh {
         includes = [project.property('jmhInclude')]
     }
     if (project.hasProperty('jmhProfilers')) {
-        profilers = [project.property('jmhProfilers')]
+        def profStr = project.property('jmhProfilers') as String
+        if (profStr.startsWith('async')) {
+            // Resolve native lib from ap-loader JAR on the jmh classpath
+            def apJar = configurations.jmh.files.find { it.name.contains('ap-loader') }
+            if (apJar) {
+                def proc = ['java', '-jar', apJar.absolutePath, 'agentpath'].execute()
+                def finished = proc.waitFor(10, java.util.concurrent.TimeUnit.SECONDS) // check the result: reading proc.text from a still-running process would block forever
+                def libPath = finished ? proc.text.trim() : ''
+                if (libPath && new File(libPath).exists()) {
+                    if (profStr == 'async') {
+                        profilers = ["async:libPath=${libPath}"]
+                    } else {
+                        profilers = [profStr.replaceFirst('async:', "async:libPath=${libPath};")]
+                    }
+                } else {
+                    profilers = [profStr] // resolver timed out or lib missing: pass the raw string through
+                }
+            } else {
+                profilers = [profStr]
+            }
+        } else {
+            profilers = [profStr]
+        }
+    }
+    if (project.hasProperty('jmhFork')) {
+        fork = project.property('jmhFork') as int
+    }
+    if (project.hasProperty('jmhIterations')) {
+        iterations = project.property('jmhIterations') as int
+    }
+    if (project.hasProperty('jmhWarmupIterations')) {
+        warmupIterations = project.property('jmhWarmupIterations') as int
    }
}
diff --git a/src/jmh/java/benchmark/ExecutionBenchmark.java b/src/jmh/java/benchmark/ExecutionBenchmark.java
new file mode 100644
index 0000000000..b2ff51cbee
--- /dev/null
+++ b/src/jmh/java/benchmark/ExecutionBenchmark.java
@@ -0,0 +1,427 @@
+package benchmark;
+
+import graphql.ExecutionInput;
+import graphql.ExecutionResult;
+import graphql.GraphQL;
+import graphql.execution.instrumentation.dataloader.DataLoaderDispatchingContextKeys;
+import graphql.execution.preparsed.persisted.InMemoryPersistedQueryCache;
+import graphql.execution.preparsed.persisted.PersistedQueryCache;
+import graphql.execution.preparsed.persisted.PersistedQuerySupport;
+import graphql.schema.DataFetcher;
+import graphql.schema.FieldCoordinates;
+import graphql.schema.GraphQLCodeRegistry;
+import graphql.schema.GraphQLFieldDefinition;
+import graphql.schema.GraphQLList;
+import graphql.schema.GraphQLObjectType;
+import graphql.schema.GraphQLSchema;
+import org.dataloader.BatchLoader;
+import org.dataloader.DataLoaderFactory;
+import org.dataloader.DataLoaderRegistry;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.runner.Runner;
+import org.openjdk.jmh.runner.RunnerException;
+import org.openjdk.jmh.runner.options.Options;
+import org.openjdk.jmh.runner.options.OptionsBuilder;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import static graphql.Scalars.GraphQLString;
+
+/**
+ * Measures the graphql-java engine's core execution overhead (field resolution,
+ * type checking, result building) with a balanced, realistic workload while
+ * minimising data-fetching work.
+ *
+ * Schema: 20 object types across 4 depth levels (5 types per level).
+ * Query shape: ~530 queried fields, ~2000 result scalar values.
+ * Width (~7 fields per selection set) ≈ Depth (5 levels).
+ *
+ * Two variants: baseline (PropertyDataFetcher with embedded Maps) and
+ * DataLoader (child fields resolved via batched DataLoader calls).
+ */
+@Warmup(iterations = 2, time = 5)
+@Measurement(iterations = 3)
+@Fork(2)
+public class ExecutionBenchmark {
+
+    // 4 levels of object types below Query → total query depth = 5
+    private static final int LEVELS = 4;
+    // 5 types per level = 20 types total
+    private static final int TYPES_PER_LEVEL = 5;
+    // Intermediate types: 5 scalar fields + child_a + child_b = 7 selections
+    private static final int SCALAR_FIELDS = 5;
+    // Leaf types: 7 scalar fields
+    private static final int LEAF_SCALAR_FIELDS = 7;
+    // Query: 5 top-level fields (2 single + 3 list)
+    private static final int QUERY_FIELDS = 5;
+    private static final int QUERY_SINGLE_COUNT = 2;
+    // List fields return 2 items each
+    private static final int LIST_SIZE = 2;
+
+    // Schema types shared by both variants: types[0] = L4 (leaf), types[LEVELS-1] = L1
+    private static final GraphQLObjectType[][] schemaTypes = buildSchemaTypes();
+    private static final GraphQLObjectType queryType = buildQueryType();
+    static final String query = mkQuery(); // built once at class load; shared by every benchmark invocation
+    private static final String queryId = "exec-benchmark-query"; // presumably an id for a persisted-query variant (see PersistedQuerySupport imports) — not used in this visible chunk
+
+    // ---- Baseline variant (PropertyDataFetcher with embedded Maps) ----
+    static final GraphQL graphQL = buildGraphQL(); // engine built once so benchmarks measure execution only, not schema construction
+
+    // ---- DataLoader variant ----
+    // levelStores[i] holds all DTOs at schema level i+1 (index 0 = L1, 3 = L4)
+    @SuppressWarnings("unchecked")
+    private static final Map<String, Map<String, Object>>[] levelStores = new Map[LEVELS]; // NOTE(review): type arguments restored — angle-bracket content was stripped in transit; verify key/DTO types against full source
+    static {
+        for (int i = 0; i < LEVELS; i++) {
+            levelStores[i] = new HashMap<>();
+        }
+    }
+    static final GraphQL graphQLWithDL = buildGraphQLWithDataLoader();
+    private static final ExecutorService batchLoadExecutor = Executors.newCachedThreadPool();
+    private static final BatchLoader<String, Map<String, Object>> batchLoaderL2 =
+            keys -> CompletableFuture.supplyAsync(
+                    () -> keys.stream().map(k -> levelStores[1].get(k)).collect(Collectors.toList()),
+                    batchLoadExecutor);
+    private static final BatchLoader<String, Map<String, Object>> batchLoaderL3 =
+            keys -> CompletableFuture.supplyAsync(
+                    () -> keys.stream().map(k -> levelStores[2].get(k)).collect(Collectors.toList()),
+                    batchLoadExecutor);
+    private static final BatchLoader<String, Map<String, Object>> batchLoaderL4 =
+            keys -> CompletableFuture.supplyAsync(
+                    () -> keys.stream().map(k -> levelStores[3].get(k)).collect(Collectors.toList()),
+                    batchLoadExecutor);
+
+ // ================ Benchmark methods ================
+
+    @Benchmark
+    @BenchmarkMode(Mode.Throughput)
+    @OutputTimeUnit(TimeUnit.SECONDS)
+    public ExecutionResult benchmarkThroughput() { // baseline engine, ops/sec
+        return execute(); // returning the result lets JMH consume it, preventing dead-code elimination
+    }
+
+    @Benchmark
+    @BenchmarkMode(Mode.AverageTime)
+    @OutputTimeUnit(TimeUnit.MILLISECONDS)
+    public ExecutionResult benchmarkAvgTime() { // baseline engine, ms/op
+        return execute();
+    }
+
+    @Benchmark
+    @BenchmarkMode(Mode.Throughput)
+    @OutputTimeUnit(TimeUnit.SECONDS)
+    public ExecutionResult benchmarkDataLoaderThroughput() { // DataLoader variant, ops/sec
+        return executeWithDataLoader();
+    }
+
+    @Benchmark
+    @BenchmarkMode(Mode.AverageTime)
+    @OutputTimeUnit(TimeUnit.MILLISECONDS)
+    public ExecutionResult benchmarkDataLoaderAvgTime() { // DataLoader variant, ms/op
+        return executeWithDataLoader();
+    }
+
+    private static ExecutionResult execute() {
+        return graphQL.execute(query); // baseline path: no DataLoaderRegistry attached
+    }
+
+    private static ExecutionResult executeWithDataLoader() {
+        DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() // fresh registry per execution so batching/caching state never carries across benchmark iterations
+                .register("dl_2", DataLoaderFactory.newDataLoader(batchLoaderL2))
+                .register("dl_3", DataLoaderFactory.newDataLoader(batchLoaderL3))
+                .register("dl_4", DataLoaderFactory.newDataLoader(batchLoaderL4))
+                .build();
+        ExecutionInput input = ExecutionInput.newExecutionInput()
+                .query(query)
+                .dataLoaderRegistry(registry)
+                .build();
+        input.getGraphQLContext().put( // NOTE(review): opts in to the exhausted-dispatching strategy per the key name — confirm semantics against DataLoaderDispatchingContextKeys docs
+                DataLoaderDispatchingContextKeys.ENABLE_DATA_LOADER_EXHAUSTED_DISPATCHING, true);
+        return graphQLWithDL.execute(input);
+    }
+
+ // ================ Query generation ================
+
+    static String mkQuery() {
+        // Builds the fixed benchmark query: QUERY_FIELDS top-level fields, each with a full selection tree.
+        StringBuilder out = new StringBuilder("{ ");
+        int fieldNo = 0;
+        while (++fieldNo <= QUERY_FIELDS) {
+            out.append("field_").append(fieldNo).append(" ");
+            appendSelection(out, 1);
+            out.append(" ");
+        }
+        return out.append("}").toString();
+    }
+
+    private static void appendSelection(StringBuilder out, int level) {
+        // Recursively emits one selection set; leaves get LEAF_SCALAR_FIELDS scalars,
+        // intermediate levels get SCALAR_FIELDS scalars plus child_a/child_b subtrees.
+        out.append("{ ");
+        if (level >= LEVELS) {
+            for (int n = 1; n <= LEAF_SCALAR_FIELDS; n++) {
+                out.append("s").append(n).append(" ");
+            }
+        } else {
+            for (int n = 1; n <= SCALAR_FIELDS; n++) {
+                out.append("s").append(n).append(" ");
+            }
+            out.append("child_a ");
+            appendSelection(out, level + 1);
+            out.append(" child_b ");
+            appendSelection(out, level + 1);
+        }
+        out.append("}");
+    }
+
+ // ================ Schema types (shared) ================
+
+    private static GraphQLObjectType[][] buildSchemaTypes() {
+        GraphQLObjectType[][] types = new GraphQLObjectType[LEVELS][TYPES_PER_LEVEL];
+
+        // Leaf types (level 4): 7 scalar fields each
+        for (int i = 0; i < TYPES_PER_LEVEL; i++) {
+            List<GraphQLFieldDefinition> fields = new ArrayList<>(); // NOTE(review): type argument restored — angle brackets were stripped in transit
+            for (int f = 1; f <= LEAF_SCALAR_FIELDS; f++) {
+                fields.add(GraphQLFieldDefinition.newFieldDefinition()
+                        .name("s" + f).type(GraphQLString).build());
+            }
+            types[0][i] = GraphQLObjectType.newObject()
+                    .name("Type_L4_" + (i + 1)).fields(fields).build();
+        }
+
+        // Intermediate types (levels 3 down to 1)
+        for (int lvlIdx = 1; lvlIdx < LEVELS; lvlIdx++) {
+            GraphQLObjectType[] childLevel = types[lvlIdx - 1];
+            int schemaLevel = LEVELS - lvlIdx; // naming: L3, L2, L1
+            for (int i = 0; i < TYPES_PER_LEVEL; i++) {
+                List<GraphQLFieldDefinition> fields = new ArrayList<>(); // NOTE(review): type argument restored here as well
+                for (int f = 1; f <= SCALAR_FIELDS; f++) {
+                    fields.add(GraphQLFieldDefinition.newFieldDefinition()
+                            .name("s" + f).type(GraphQLString).build());
+                }
+                fields.add(GraphQLFieldDefinition.newFieldDefinition()
+                        .name("child_a").type(childLevel[i]).build());
+                fields.add(GraphQLFieldDefinition.newFieldDefinition()
+                        .name("child_b")
+                        .type(GraphQLList.list(childLevel[(i + 1) % TYPES_PER_LEVEL]))
+                        .build());
+                types[lvlIdx][i] = GraphQLObjectType.newObject()
+                        .name("Type_L" + schemaLevel + "_" + (i + 1)).fields(fields).build();
+            }
+        }
+        return types;
+    }
+
+    private static GraphQLObjectType buildQueryType() {
+        GraphQLObjectType[] l1Types = schemaTypes[LEVELS - 1];
+        List<GraphQLFieldDefinition> queryFields = new ArrayList<>(); // NOTE(review): type argument restored — angle brackets were stripped in transit
+        for (int i = 0; i < QUERY_FIELDS; i++) {
+            if (i < QUERY_SINGLE_COUNT) {
+                queryFields.add(GraphQLFieldDefinition.newFieldDefinition()
+                        .name("field_" + (i + 1)).type(l1Types[i]).build());
+            } else {
+                queryFields.add(GraphQLFieldDefinition.newFieldDefinition()
+                        .name("field_" + (i + 1))
+                        .type(GraphQLList.list(l1Types[i])).build());
+            }
+        }
+        return GraphQLObjectType.newObject().name("Query").fields(queryFields).build();
+    }
+
+ // ================ Baseline variant ================
+
+ private static GraphQL buildGraphQL() {
+ GraphQLCodeRegistry.Builder codeRegistry = GraphQLCodeRegistry.newCodeRegistry();
+ for (int i = 0; i < QUERY_FIELDS; i++) {
+ final Object data;
+ if (i < QUERY_SINGLE_COUNT) {
+ data = buildEmbeddedDto(1, i);
+ } else {
+ List