diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 185f0ef115..b04c8ba875 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -67,6 +67,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
- uses: github/codeql-action/upload-sarif@673cceb2b4886e2dfff697ab64a1ecd1c0a14a05 # v2.28.0
+ uses: github/codeql-action/upload-sarif@b8d3b6e8af63cde30bdc382c0bc28114f4346c88 # v2.28.1
with:
sarif_file: results.sarif
diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml
index 5ced7ae24d..813d09f1ba 100644
--- a/.github/workflows/unmanaged_dependency_check.yaml
+++ b/.github/workflows/unmanaged_dependency_check.yaml
@@ -17,7 +17,7 @@ jobs:
# repository
.kokoro/build.sh
- name: Unmanaged dependency check
- uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.41.1
+ uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.42.0
with:
# java-bigquery does not produce a BOM. Fortunately the root pom.xml
# defines google-cloud-bigquery in dependencyManagement section. So
diff --git a/.kokoro/continuous/graalvm-native-17.cfg b/.kokoro/continuous/graalvm-native-17.cfg
index a512b35af9..ce53947530 100644
--- a/.kokoro/continuous/graalvm-native-17.cfg
+++ b/.kokoro/continuous/graalvm-native-17.cfg
@@ -3,7 +3,7 @@
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.41.1"
+ value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.42.0"
}
env_vars: {
diff --git a/.kokoro/continuous/graalvm-native.cfg b/.kokoro/continuous/graalvm-native.cfg
index 554601f249..ffdac23782 100644
--- a/.kokoro/continuous/graalvm-native.cfg
+++ b/.kokoro/continuous/graalvm-native.cfg
@@ -3,7 +3,7 @@
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.41.1"
+ value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.42.0"
}
env_vars: {
diff --git a/.kokoro/presubmit/graalvm-native-17.cfg b/.kokoro/presubmit/graalvm-native-17.cfg
index 4218cff43a..6331bd5861 100644
--- a/.kokoro/presubmit/graalvm-native-17.cfg
+++ b/.kokoro/presubmit/graalvm-native-17.cfg
@@ -3,7 +3,7 @@
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.41.1"
+ value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.42.0"
}
env_vars: {
diff --git a/.kokoro/presubmit/graalvm-native.cfg b/.kokoro/presubmit/graalvm-native.cfg
index b070666b48..f08c4be3e0 100644
--- a/.kokoro/presubmit/graalvm-native.cfg
+++ b/.kokoro/presubmit/graalvm-native.cfg
@@ -3,7 +3,7 @@
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.41.1"
+ value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.42.0"
}
env_vars: {
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3e93809751..addb3d7233 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,25 @@
# Changelog
+## [2.47.0](https://github.com/googleapis/java-bigquery/compare/v2.46.0...v2.47.0) (2025-01-29)
+
+
+### Features
+
+* **bigquery:** Support resource tags for datasets in java client ([#3647](https://github.com/googleapis/java-bigquery/issues/3647)) ([01e0b74](https://github.com/googleapis/java-bigquery/commit/01e0b742b9ffeafaa89b080a39d8a66c12c1fd3b))
+
+
+### Bug Fixes
+
+* **bigquery:** Remove ReadAPI bypass in executeSelect() ([#3624](https://github.com/googleapis/java-bigquery/issues/3624)) ([fadd992](https://github.com/googleapis/java-bigquery/commit/fadd992a63fd1bc87c99cc689ed103f05de49a99))
+* Close bq read client ([#3644](https://github.com/googleapis/java-bigquery/issues/3644)) ([8833c97](https://github.com/googleapis/java-bigquery/commit/8833c97d73e3ba8e6a2061bbc55a6254b9e6668e))
+
+
+### Dependencies
+
+* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250112-2.0.0 ([#3651](https://github.com/googleapis/java-bigquery/issues/3651)) ([fd06100](https://github.com/googleapis/java-bigquery/commit/fd06100c4c18b0416d384ec1f6bdfc796b70ad9f))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.42.0 ([#3653](https://github.com/googleapis/java-bigquery/issues/3653)) ([1a14342](https://github.com/googleapis/java-bigquery/commit/1a143428c7f584db3dd6e827c2ee8fe980afe18c))
+* Update github/codeql-action action to v2.28.1 ([#3637](https://github.com/googleapis/java-bigquery/issues/3637)) ([858e517](https://github.com/googleapis/java-bigquery/commit/858e51792d98276f10fd780ef6edd0bb4a1b4f54))
+
## [2.46.0](https://github.com/googleapis/java-bigquery/compare/v2.45.0...v2.46.0) (2025-01-11)
diff --git a/benchmark/pom.xml b/benchmark/pom.xml
index 18d5535d1a..602d048577 100644
--- a/benchmark/pom.xml
+++ b/benchmark/pom.xml
@@ -6,7 +6,7 @@
google-cloud-bigquery-parent
com.google.cloud
- 2.46.0
+ 2.47.0
diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml
index 3326027c8e..3a5c6f3e1d 100644
--- a/google-cloud-bigquery-bom/pom.xml
+++ b/google-cloud-bigquery-bom/pom.xml
@@ -3,12 +3,12 @@
4.0.0
com.google.cloud
google-cloud-bigquery-bom
- 2.46.0
+ 2.47.0
pom
com.google.cloud
sdk-platform-java-config
- 3.41.1
+ 3.42.0
@@ -54,7 +54,7 @@
com.google.cloud
google-cloud-bigquery
- 2.46.0
+ 2.47.0
diff --git a/google-cloud-bigquery/clirr-ignored-differences.xml b/google-cloud-bigquery/clirr-ignored-differences.xml
index b6546847fe..9ef680c667 100644
--- a/google-cloud-bigquery/clirr-ignored-differences.xml
+++ b/google-cloud-bigquery/clirr-ignored-differences.xml
@@ -134,4 +134,9 @@
com/google/cloud/bigquery/DatasetInfo*
*setMaxTimeTravelHours(*)
+
+ 7013
+ com/google/cloud/bigquery/DatasetInfo*
+ *setResourceTags(*)
+
\ No newline at end of file
diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml
index ed717baf09..13651acacb 100644
--- a/google-cloud-bigquery/pom.xml
+++ b/google-cloud-bigquery/pom.xml
@@ -3,7 +3,7 @@
4.0.0
com.google.cloud
google-cloud-bigquery
- 2.46.0
+ 2.47.0
jar
BigQuery
https://github.com/googleapis/java-bigquery
@@ -11,7 +11,7 @@
com.google.cloud
google-cloud-bigquery-parent
- 2.46.0
+ 2.47.0
google-cloud-bigquery
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
index 1f08bf4eb5..5ad9fe2843 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
@@ -97,6 +97,7 @@ class ConnectionImpl implements Connection {
private final Logger logger = Logger.getLogger(this.getClass().getName());
private BigQueryReadClient bqReadClient;
private static final long EXECUTOR_TIMEOUT_SEC = 10;
+ private static final long BIGQUERY_TIMEOUT_SEC = 10;
private BlockingQueue>
bufferFvl; // initialized lazily iff we end up using the tabledata.list end point
private BlockingQueue
@@ -148,8 +149,15 @@ public synchronized boolean close() throws BigQuerySQLException {
flagEndOfStream(); // an End of Stream flag in the buffer so that the `ResultSet.next()` stops
// advancing the cursor
queryTaskExecutor.shutdownNow();
+ boolean isBqReadClientTerminated = true;
try {
- if (queryTaskExecutor.awaitTermination(EXECUTOR_TIMEOUT_SEC, TimeUnit.SECONDS)) {
+ if (bqReadClient != null) {
+ bqReadClient.shutdownNow();
+ isBqReadClientTerminated =
+ bqReadClient.awaitTermination(BIGQUERY_TIMEOUT_SEC, TimeUnit.SECONDS);
+ }
+ if (queryTaskExecutor.awaitTermination(EXECUTOR_TIMEOUT_SEC, TimeUnit.SECONDS)
+ && isBqReadClientTerminated) {
return true;
} // else queryTaskExecutor.isShutdown() will be returned outside this try block
} catch (InterruptedException e) {
@@ -159,7 +167,9 @@ public synchronized boolean close() throws BigQuerySQLException {
e); // Logging InterruptedException instead of throwing the exception back, close method
// will return queryTaskExecutor.isShutdown()
}
- return queryTaskExecutor.isShutdown(); // check if the executor has been shutdown
+
+ return queryTaskExecutor.isShutdown()
+ && isBqReadClientTerminated; // check if the executor has been shutdown
}
/**
@@ -476,22 +486,29 @@ private BigQueryResult queryRpc(
}
// Query finished running and we can paginate all the results
- if (results.getJobComplete() && results.getSchema() != null) {
+ // Results should be read using the high throughput read API if sufficiently large.
+ boolean resultsLargeEnoughForReadApi =
+ connectionSettings.getUseReadAPI()
+ && results.getTotalRows() != null
+ && results.getTotalRows().longValue() > connectionSettings.getMinResultSize();
+ if (results.getJobComplete() && results.getSchema() != null && !resultsLargeEnoughForReadApi) {
return processQueryResponseResults(results);
} else {
- // Query is long-running (> 10s) and hasn't completed yet, or query completed but didn't
- // return the schema, fallback to jobs.insert path. Some operations don't return the schema
- // and can be optimized here, but this is left as future work.
- Long totalRows = results.getTotalRows() == null ? null : results.getTotalRows().longValue();
- Long pageRows = results.getRows() == null ? null : (long) (results.getRows().size());
+ // Query is long-running (> 10s) and hasn't completed yet, query completed but didn't
+ // return the schema, or results are sufficiently large to use the high throughput read API,
+ // fallback to jobs.insert path. Some operations don't return the schema and can be optimized
+ // here, but this is left as future work.
+ JobId jobId = JobId.fromPb(results.getJobReference());
+ GetQueryResultsResponse firstPage = getQueryResultsFirstPage(jobId);
+ Long totalRows =
+ firstPage.getTotalRows() == null ? null : firstPage.getTotalRows().longValue();
+ Long pageRows = firstPage.getRows() == null ? null : (long) (firstPage.getRows().size());
logger.log(
Level.WARNING,
"\n"
+ String.format(
"results.getJobComplete(): %s, isSchemaNull: %s , totalRows: %s, pageRows: %s",
results.getJobComplete(), results.getSchema() == null, totalRows, pageRows));
- JobId jobId = JobId.fromPb(results.getJobReference());
- GetQueryResultsResponse firstPage = getQueryResultsFirstPage(jobId);
return getSubsequentQueryResultsWithJob(
totalRows, pageRows, jobId, firstPage, hasQueryParameters);
}
@@ -985,7 +1002,6 @@ BigQueryResult highThroughPutRead(
// DO a regex check using order by and use multiple streams
;
ReadSession readSession = bqReadClient.createReadSession(builder.build());
-
bufferRow = new LinkedBlockingDeque<>(getBufferSize());
Map arrowNameToIndex = new HashMap<>();
// deserialize and populate the buffer async, so that the client isn't blocked
@@ -996,6 +1012,7 @@ BigQueryResult highThroughPutRead(
schema);
logger.log(Level.INFO, "\n Using BigQuery Read API");
+ stats.getQueryStatistics().setUseReadApi(true);
return new BigQueryResultImpl(schema, totalRows, bufferRow, stats);
} catch (IOException e) {
@@ -1042,6 +1059,7 @@ private void processArrowStreamAsync(
"\n" + Thread.currentThread().getName() + " Interrupted @ markLast",
e);
}
+ bqReadClient.shutdownNow(); // Shutdown the read client
queryTaskExecutor.shutdownNow(); // Shutdown the thread pool
}
};
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java
index 4fc8577853..18606e7013 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java
@@ -170,6 +170,12 @@ public Builder setMaxTimeTravelHours(Long maxTimeTravelHours) {
return this;
}
+ @Override
+ public Builder setResourceTags(Map resourceTags) {
+ infoBuilder.setResourceTags(resourceTags);
+ return this;
+ }
+
@Override
public Dataset build() {
return new Dataset(bigquery, infoBuilder);
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java
index f9b7f03e17..90d6c2cd55 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java
@@ -76,6 +76,7 @@ public Dataset apply(DatasetInfo datasetInfo) {
private final ExternalDatasetReference externalDatasetReference;
private final String storageBillingModel;
private final Long maxTimeTravelHours;
+ private final Annotations resourceTags;
/** A builder for {@code DatasetInfo} objects. */
public abstract static class Builder {
@@ -184,6 +185,19 @@ public abstract Builder setDefaultEncryptionConfiguration(
*/
public abstract Builder setDefaultCollation(String defaultCollation);
+ /**
+ * Optional. The tags attached to this
+ * dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format,
+ * for example "123456789012/environment" where 123456789012 is the ID of the parent
+ * organization or project resource for this tag key. Tag value is expected to be the short
+ * name, for example "Production".
+ *
+ * @see Tag
+ * definitions for more details.
+ * @param resourceTags resourceTags or {@code null} for none
+ */
+ public abstract Builder setResourceTags(Map resourceTags);
+
/** Creates a {@code DatasetInfo} object. */
public abstract DatasetInfo build();
}
@@ -208,6 +222,7 @@ static final class BuilderImpl extends Builder {
private ExternalDatasetReference externalDatasetReference;
private String storageBillingModel;
private Long maxTimeTravelHours;
+ private Annotations resourceTags = Annotations.ZERO;
BuilderImpl() {}
@@ -230,6 +245,7 @@ static final class BuilderImpl extends Builder {
this.externalDatasetReference = datasetInfo.externalDatasetReference;
this.storageBillingModel = datasetInfo.storageBillingModel;
this.maxTimeTravelHours = datasetInfo.maxTimeTravelHours;
+ this.resourceTags = datasetInfo.resourceTags;
}
BuilderImpl(com.google.api.services.bigquery.model.Dataset datasetPb) {
@@ -270,6 +286,7 @@ public Acl apply(Dataset.Access accessPb) {
}
this.storageBillingModel = datasetPb.getStorageBillingModel();
this.maxTimeTravelHours = datasetPb.getMaxTimeTravelHours();
+ this.resourceTags = Annotations.fromPb(datasetPb.getResourceTags());
}
@Override
@@ -388,6 +405,12 @@ public Builder setMaxTimeTravelHours(Long maxTimeTravelHours) {
return this;
}
+ @Override
+ public Builder setResourceTags(Map resourceTags) {
+ this.resourceTags = Annotations.fromUser(resourceTags);
+ return this;
+ }
+
@Override
public DatasetInfo build() {
return new DatasetInfo(this);
@@ -413,6 +436,7 @@ public DatasetInfo build() {
externalDatasetReference = builder.externalDatasetReference;
storageBillingModel = builder.storageBillingModel;
maxTimeTravelHours = builder.maxTimeTravelHours;
+ resourceTags = builder.resourceTags;
}
/** Returns the dataset identity. */
@@ -554,6 +578,21 @@ public Long getMaxTimeTravelHours() {
return maxTimeTravelHours;
}
+ /**
+ * Optional. The tags attached to this
+ * dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for
+ * example "123456789012/environment" where 123456789012 is the ID of the parent organization or
+ * project resource for this tag key. Tag value is expected to be the short name, for example
+ * "Production".
+ *
+ * @see Tag
+ * definitions for more details.
+ * @return value or {@code null} for none
+ */
+ public Map getResourceTags() {
+ return resourceTags.userMap();
+ }
+
/**
* Returns information about the external metadata storage where the dataset is defined. Filled
* out when the dataset type is EXTERNAL.
@@ -588,6 +627,7 @@ public String toString() {
.add("externalDatasetReference", externalDatasetReference)
.add("storageBillingModel", storageBillingModel)
.add("maxTimeTravelHours", maxTimeTravelHours)
+ .add("resourceTags", resourceTags)
.toString();
}
@@ -675,6 +715,7 @@ public Dataset.Access apply(Acl acl) {
if (maxTimeTravelHours != null) {
datasetPb.setMaxTimeTravelHours(maxTimeTravelHours);
}
+ datasetPb.setResourceTags(resourceTags.toPb());
return datasetPb;
}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java
index efbfda0222..407e25a8f3 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java
@@ -27,6 +27,7 @@
import com.google.auto.value.AutoValue;
import com.google.cloud.StringEnumType;
import com.google.cloud.StringEnumValue;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.base.MoreObjects.ToStringHelper;
@@ -396,6 +397,7 @@ public static class QueryStatistics extends JobStatistics {
private final BiEngineStats biEngineStats;
private final Integer billingTier;
private final Boolean cacheHit;
+ private Boolean useReadApi;
private final String ddlOperationPerformed;
private final TableId ddlTargetTable;
private final RoutineId ddlTargetRoutine;
@@ -796,6 +798,7 @@ private QueryStatistics(Builder builder) {
this.biEngineStats = builder.biEngineStats;
this.billingTier = builder.billingTier;
this.cacheHit = builder.cacheHit;
+ this.useReadApi = false;
this.ddlOperationPerformed = builder.ddlOperationPerformed;
this.ddlTargetTable = builder.ddlTargetTable;
this.ddlTargetRoutine = builder.ddlTargetRoutine;
@@ -835,6 +838,18 @@ public Boolean getCacheHit() {
return cacheHit;
}
+ /** Returns whether the query result is read from the high throughput ReadAPI. */
+ @VisibleForTesting
+ public Boolean getUseReadApi() {
+ return useReadApi;
+ }
+
+ /** Sets internal state to reflect the use of the high throughput ReadAPI. */
+ @VisibleForTesting
+ public void setUseReadApi(Boolean useReadApi) {
+ this.useReadApi = useReadApi;
+ }
+
/** [BETA] For DDL queries, returns the operation applied to the DDL target table. */
public String getDdlOperationPerformed() {
return ddlOperationPerformed;
diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java
index 49a392baf7..e999b86e25 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java
@@ -62,6 +62,10 @@ public class DatasetInfoTest {
private static final String STORAGE_BILLING_MODEL = "LOGICAL";
private static final Long MAX_TIME_TRAVEL_HOURS_5_DAYS = 120L;
private static final Long MAX_TIME_TRAVEL_HOURS_7_DAYS = 168L;
+ private static final Map RESOURCE_TAGS =
+ ImmutableMap.of(
+ "example-key1", "example-value1",
+ "example-key2", "example-value2");
private static final ExternalDatasetReference EXTERNAL_DATASET_REFERENCE =
ExternalDatasetReference.newBuilder()
@@ -85,6 +89,7 @@ public class DatasetInfoTest {
.setDefaultPartitionExpirationMs(DEFAULT_PARTITION__EXPIRATION)
.setStorageBillingModel(STORAGE_BILLING_MODEL)
.setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_7_DAYS)
+ .setResourceTags(RESOURCE_TAGS)
.build();
private static final DatasetInfo DATASET_INFO_COMPLETE =
DATASET_INFO
@@ -183,6 +188,7 @@ public void testBuilder() {
assertEquals(
MAX_TIME_TRAVEL_HOURS_5_DAYS,
DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS.getMaxTimeTravelHours());
+ assertEquals(RESOURCE_TAGS, DATASET_INFO.getResourceTags());
}
@Test
@@ -272,5 +278,6 @@ private void compareDatasets(DatasetInfo expected, DatasetInfo value) {
assertEquals(expected.getExternalDatasetReference(), value.getExternalDatasetReference());
assertEquals(expected.getStorageBillingModel(), value.getStorageBillingModel());
assertEquals(expected.getMaxTimeTravelHours(), value.getMaxTimeTravelHours());
+ assertEquals(expected.getResourceTags(), value.getResourceTags());
}
}
diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java
index e2d7c635ce..d138e3cb51 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java
@@ -68,6 +68,10 @@ public class DatasetTest {
private static final Field FIELD = Field.of("FieldName", LegacySQLTypeName.INTEGER);
private static final String STORAGE_BILLING_MODEL = "LOGICAL";
private static final Long MAX_TIME_TRAVEL_HOURS = 168L;
+ private static final Map RESOURCE_TAGS =
+ ImmutableMap.of(
+ "example-key1", "example-value1",
+ "example-key2", "example-value2");
private static final StandardTableDefinition TABLE_DEFINITION =
StandardTableDefinition.of(Schema.of(FIELD));
private static final ViewDefinition VIEW_DEFINITION = ViewDefinition.of("QUERY");
@@ -124,6 +128,7 @@ public void testBuilder() {
.setLabels(LABELS)
.setStorageBillingModel(STORAGE_BILLING_MODEL)
.setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS)
+ .setResourceTags(RESOURCE_TAGS)
.build();
assertEquals(DATASET_ID, builtDataset.getDatasetId());
assertEquals(ACCESS_RULES, builtDataset.getAcl());
@@ -139,6 +144,7 @@ public void testBuilder() {
assertEquals(LABELS, builtDataset.getLabels());
assertEquals(STORAGE_BILLING_MODEL, builtDataset.getStorageBillingModel());
assertEquals(MAX_TIME_TRAVEL_HOURS, builtDataset.getMaxTimeTravelHours());
+ assertEquals(RESOURCE_TAGS, builtDataset.getResourceTags());
}
@Test
@@ -348,6 +354,7 @@ public void testExternalDatasetReference() {
.setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE)
.setStorageBillingModel(STORAGE_BILLING_MODEL)
.setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS)
+ .setResourceTags(RESOURCE_TAGS)
.build();
assertEquals(
EXTERNAL_DATASET_REFERENCE,
@@ -379,5 +386,6 @@ private void compareDatasetInfo(DatasetInfo expected, DatasetInfo value) {
assertEquals(expected.getExternalDatasetReference(), value.getExternalDatasetReference());
assertEquals(expected.getStorageBillingModel(), value.getStorageBillingModel());
assertEquals(expected.getMaxTimeTravelHours(), value.getMaxTimeTravelHours());
+ assertEquals(expected.getResourceTags(), value.getResourceTags());
}
}
diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
index 24bf84f6ea..0178ac10ad 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
@@ -3489,6 +3489,63 @@ public void testExecuteSelectDefaultConnectionSettings() throws SQLException {
String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
BigQueryResult bigQueryResult = connection.executeSelect(query);
assertEquals(42, bigQueryResult.getTotalRows());
+ assertFalse(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi());
+ }
+
+ @Test
+ public void testExecuteSelectWithReadApi() throws SQLException {
+ final int rowLimit = 5000;
+ final String QUERY =
+ "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s";
+ // Job timeout is somewhat arbitrary - just ensures that fast query is not used.
+ // min result size and page row count ratio ensure that the ReadAPI is used.
+ ConnectionSettings connectionSettingsReadAPIEnabledFastQueryDisabled =
+ ConnectionSettings.newBuilder()
+ .setUseReadAPI(true)
+ .setJobTimeoutMs(Long.MAX_VALUE)
+ .setMinResultSize(500)
+ .setTotalToPageRowCountRatio(1)
+ .build();
+
+ Connection connectionReadAPIEnabled =
+ bigquery.createConnection(connectionSettingsReadAPIEnabledFastQueryDisabled);
+
+ String selectQuery = String.format(QUERY, rowLimit);
+
+ BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery);
+ ResultSet rs = bigQueryResultSet.getResultSet();
+ // Paginate results to avoid an InterruptedException
+ while (rs.next()) {}
+
+ assertTrue(bigQueryResultSet.getBigQueryResultStats().getQueryStatistics().getUseReadApi());
+ connectionReadAPIEnabled.close();
+ }
+
+ @Test
+ public void testExecuteSelectWithFastQueryReadApi() throws SQLException {
+ final int rowLimit = 5000;
+ final String QUERY =
+ "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s";
+ // min result size and page row count ratio ensure that the ReadAPI is used.
+ ConnectionSettings connectionSettingsReadAPIEnabledFastQueryDisabled =
+ ConnectionSettings.newBuilder()
+ .setUseReadAPI(true)
+ .setMinResultSize(500)
+ .setTotalToPageRowCountRatio(1)
+ .build();
+
+ Connection connectionReadAPIEnabled =
+ bigquery.createConnection(connectionSettingsReadAPIEnabledFastQueryDisabled);
+
+ String selectQuery = String.format(QUERY, rowLimit);
+
+ BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery);
+ ResultSet rs = bigQueryResultSet.getResultSet();
+ // Paginate results to avoid an InterruptedException
+ while (rs.next()) {}
+
+ assertTrue(bigQueryResultSet.getBigQueryResultStats().getQueryStatistics().getUseReadApi());
+ connectionReadAPIEnabled.close();
}
@Test
@@ -3540,6 +3597,7 @@ public void testExecuteSelectWithCredentials() throws SQLException {
+ TABLE_ID_LARGE.getTable(); // Large query result is needed to use BigQueryReadClient.
BigQueryResult bigQueryResult = connectionGoodCredentials.executeSelect(query);
assertEquals(313348, bigQueryResult.getTotalRows());
+ assertTrue(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi());
// Scenario 2.
// Create a new bigQuery object but explicitly an invalid credential.
diff --git a/pom.xml b/pom.xml
index 55b63fffa1..d6826d5665 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.google.cloud
google-cloud-bigquery-parent
pom
- 2.46.0
+ 2.47.0
BigQuery Parent
https://github.com/googleapis/java-bigquery
@@ -14,7 +14,7 @@
com.google.cloud
sdk-platform-java-config
- 3.41.1
+ 3.42.0
@@ -54,7 +54,7 @@
UTF-8
github
google-cloud-bigquery-parent
- v2-rev20241222-2.0.0
+ v2-rev20250112-2.0.0
@@ -71,7 +71,7 @@
com.google.cloud
google-cloud-bigquerystorage-bom
- 3.11.1
+ 3.11.2
pom
import
@@ -93,7 +93,7 @@
com.google.cloud
google-cloud-bigquery
- 2.46.0
+ 2.47.0
@@ -137,7 +137,7 @@
com.google.cloud
google-cloud-storage
- 2.47.0
+ 2.48.0
test
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index 6badfec9c8..9f33fb2e60 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -45,7 +45,7 @@
com.google.cloud
google-cloud-bigquery
- 2.45.0
+ 2.46.0
@@ -63,7 +63,7 @@
com.google.cloud
google-cloud-bigtable
- 2.51.0
+ 2.51.1
test
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index 291f257b10..bb0ca36c12 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -44,7 +44,7 @@
com.google.cloud
google-cloud-bigquery
- 2.46.0
+ 2.47.0
@@ -61,7 +61,7 @@
com.google.cloud
google-cloud-bigtable
- 2.51.0
+ 2.51.1
test
diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index bc783dde29..635d02a71c 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -79,7 +79,7 @@
com.google.cloud
google-cloud-bigtable
- 2.51.0
+ 2.51.1
test
diff --git a/versions.txt b/versions.txt
index a944a00ff4..d6010fec4a 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,4 +1,4 @@
# Format:
# module:released-version:current-version
-google-cloud-bigquery:2.46.0:2.46.0
\ No newline at end of file
+google-cloud-bigquery:2.47.0:2.47.0
\ No newline at end of file